// Files
// TalkEdit/frontend/src/components/WaveformTimeline.tsx
// 2026-04-03 11:38:58 -06:00
//
// 768 lines
// 29 KiB
// TypeScript

import { useRef, useEffect, useCallback, useState } from 'react';
import { useEditorStore } from '../store/editorStore';
import { AlertTriangle } from 'lucide-react';
// Height in px (CSS pixels) reserved at the top of the canvas for the time ruler.
const RULER_H = 20;
/**
 * Format a time in seconds for ruler labels.
 * Past one minute: "m:ss.t" (whole seconds zero-padded, tenths truncated);
 * under a minute: "12.3s" (tenths rounded via toFixed).
 */
function formatTime(secs: number): string {
  const minutes = Math.floor(secs / 60);
  const rest = secs % 60;
  if (minutes > 0) {
    const padded = String(Math.floor(rest)).padStart(2, '0');
    const tenths = Math.floor((rest % 1) * 10);
    return `${minutes}:${padded}.${tenths}`;
  }
  return `${rest.toFixed(1)}s`;
}
/**
 * Choose "nice" ruler tick spacings (in seconds) for a given zoom level.
 * major: the smallest nice step leaving at least ~70px between labels
 *        (falls back to the largest step when none qualifies).
 * minor: the largest nice step below major that still spans >= ~6px
 *        (falls back to major itself when none qualifies).
 */
function pickInterval(pxPerSec: number): { major: number; minor: number } {
  const STEPS = [0.05, 0.1, 0.25, 0.5, 1, 2, 5, 10, 15, 30, 60, 120, 300, 600];
  const major = STEPS.find((step) => step * pxPerSec >= 70) ?? STEPS[STEPS.length - 1];
  let minor = major;
  for (const step of STEPS) {
    // Steps are ascending, so the last qualifying one is the largest.
    if (step * pxPerSec >= 6 && step < major) minor = step;
  }
  return { major, minor };
}
/**
 * Waveform timeline built from two stacked canvases:
 *  - waveCanvas: "static" layer (ruler, waveform, cut/mute/deleted overlays),
 *    redrawn only when relevant state changes.
 *  - headCanvas: playhead layer, redrawn every animation frame directly from
 *    video.currentTime so playback never triggers React re-renders.
 *
 * Interaction modes:
 *  - default: click/drag seeks the <video> element.
 *  - cutMode/muteMode: drag selects a range, committed as a cut/mute zone on
 *    mouseup. Existing zones can be selected, moved, resized (edge handles),
 *    and deleted with Delete/Backspace.
 */
export default function WaveformTimeline({ cutMode, muteMode }: { cutMode: boolean; muteMode: boolean }) {
  const waveCanvasRef = useRef<HTMLCanvasElement>(null);
  const headCanvasRef = useRef<HTMLCanvasElement>(null);
  const containerRef = useRef<HTMLDivElement>(null);
  const [audioError, setAudioError] = useState<string | null>(null);
  const videoUrl = useEditorStore((s) => s.videoUrl);
  const videoPath = useEditorStore((s) => s.videoPath);
  const backendUrl = useEditorStore((s) => s.backendUrl);
  const duration = useEditorStore((s) => s.duration);
  const deletedRanges = useEditorStore((s) => s.deletedRanges);
  const cutRanges = useEditorStore((s) => s.cutRanges);
  const muteRanges = useEditorStore((s) => s.muteRanges);
  const setCurrentTime = useEditorStore((s) => s.setCurrentTime);
  const addCutRange = useEditorStore((s) => s.addCutRange);
  const addMuteRange = useEditorStore((s) => s.addMuteRange);
  const updateCutRange = useEditorStore((s) => s.updateCutRange);
  const updateMuteRange = useEditorStore((s) => s.updateMuteRange);
  const removeCutRange = useEditorStore((s) => s.removeCutRange);
  const removeMuteRange = useEditorStore((s) => s.removeMuteRange);
  const audioContextRef = useRef<AudioContext | null>(null);
  const audioBufferRef = useRef<AudioBuffer | null>(null);
  const zoomRef = useRef(1); // 1 = show all, >1 = zoomed in
  const scrollSecsRef = useRef(0); // seconds scrolled from left
  const rafRef = useRef(0);
  // Ref so the RAF loop can call drawStaticWaveform without a stale closure
  const drawStaticWaveformRef = useRef<() => void>(() => {});
  // isDraggingRef mirrors isDragging for use inside window-level mouse handlers.
  const isDraggingRef = useRef(false);
  const [isDragging, setIsDragging] = useState(false);
  const selectionStartRef = useRef<number | null>(null);
  const [selectionStart, setSelectionStart] = useState<number | null>(null);
  const [selectionEnd, setSelectionEnd] = useState<number | null>(null);
  const [selectedZone, setSelectedZone] = useState<{type: 'cut' | 'mute', id: string} | null>(null);
  // NOTE(review): editingZone state is written but never read here; only
  // editingZoneRef is consulted by the drag handlers. Kept for compatibility.
  const [editingZone, setEditingZone] = useState<{type: 'cut' | 'mute', id: string, edge: 'start' | 'end' | 'move'} | null>(null);
  const [hoverCursor, setHoverCursor] = useState<string>('crosshair');
  const editingZoneRef = useRef<{type: 'cut' | 'mute', id: string, edge: 'start' | 'end' | 'move'} | null>(null);

  // Fetch the backend-extracted audio track and decode it once per video.
  useEffect(() => {
    if (!videoUrl || !videoPath) return;
    setAudioError(null);
    const loadAudio = async () => {
      try {
        const waveformUrl = `${backendUrl}/audio/waveform?path=${encodeURIComponent(videoPath!)}`;
        console.log('[WaveformTimeline] backendUrl:', backendUrl, '| videoPath:', videoPath);
        console.log('[WaveformTimeline] Fetching:', waveformUrl);
        const ctx = new AudioContext();
        audioContextRef.current = ctx;
        const response = await fetch(waveformUrl);
        if (!response.ok) {
          const body = await response.text().catch(() => '');
          console.error(
            `[WaveformTimeline] Fetch failed — HTTP ${response.status} ${response.statusText}`,
            { url: waveformUrl, body }
          );
          throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }
        const contentType = response.headers.get('content-type') ?? 'unknown';
        const contentLength = response.headers.get('content-length');
        console.log(
          `[WaveformTimeline] Fetch OK — content-type: ${contentType}, size: ${contentLength ?? 'unknown'} bytes`
        );
        const arrayBuffer = await response.arrayBuffer();
        console.log(`[WaveformTimeline] ArrayBuffer size: ${arrayBuffer.byteLength} bytes`);
        if (arrayBuffer.byteLength === 0) {
          throw new Error('Server returned an empty file');
        }
        let audioBuffer: AudioBuffer;
        try {
          audioBuffer = await ctx.decodeAudioData(arrayBuffer);
        } catch (decodeErr) {
          console.error(
            '[WaveformTimeline] decodeAudioData failed — browser cannot decode this format.',
            {
              contentType,
              byteLength: arrayBuffer.byteLength,
              videoPath,
              error: decodeErr,
            }
          );
          throw new Error(
            `Browser could not decode audio (${contentType}). ` +
            `For best compatibility use MP4/AAC or WebM/Opus. Raw error: ${decodeErr}`
          );
        }
        console.log(
          `[WaveformTimeline] Decoded OK — duration: ${audioBuffer.duration.toFixed(2)}s, ` +
          `channels: ${audioBuffer.numberOfChannels}, sampleRate: ${audioBuffer.sampleRate}Hz`
        );
        audioBufferRef.current = audioBuffer;
        // Use the ref so we draw with the latest closure (the direct binding
        // captured at mount could carry stale ranges by the time decode ends).
        drawStaticWaveformRef.current();
      } catch (err) {
        console.error('[WaveformTimeline] Waveform load failed:', err);
        const waveformUrl2 = `${backendUrl}/audio/waveform?path=${encodeURIComponent(videoPath ?? '')}`;
        setAudioError(`Waveform unavailable — ${err instanceof Error ? err.message : 'audio could not be decoded'} [URL: ${waveformUrl2}]`);
      }
    };
    loadAudio();
    return () => {
      // close() rejects if the context is already closed; that is harmless here.
      audioContextRef.current?.close().catch(() => {});
    };
  }, [videoUrl, videoPath, backendUrl]);

  // Render the static layer: ruler, overlays, then the min/max waveform.
  // All coordinates below are CSS pixels; the backing store is scaled by dpr.
  const drawStaticWaveform = useCallback(() => {
    const canvas = waveCanvasRef.current;
    const buffer = audioBufferRef.current;
    if (!canvas || !buffer) return;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    // Resizing the canvas resets its state, so the scale never accumulates.
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);
    const width = rect.width;
    const height = rect.height;
    const dur = buffer.duration;
    const zoom = zoomRef.current;
    const scroll = scrollSecsRef.current;
    const pxPerSec = (width * zoom) / dur;
    const sampleRate = buffer.sampleRate;
    const channelData = buffer.getChannelData(0);
    ctx.clearRect(0, 0, width, height);
    // --- Ruler background ---
    ctx.fillStyle = '#13141f';
    ctx.fillRect(0, 0, width, RULER_H);
    // Separator line
    ctx.strokeStyle = '#2a2d3e';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, RULER_H);
    ctx.lineTo(width, RULER_H);
    ctx.stroke();
    // --- Ruler ticks & labels ---
    const { major, minor } = pickInterval(pxPerSec);
    const visibleDur = width / pxPerSec;
    // Minor ticks (rounded to 1e-6 each step to avoid float drift)
    const minorStart = Math.floor(scroll / minor) * minor;
    ctx.strokeStyle = '#3a3d52';
    ctx.lineWidth = 1;
    for (let t = minorStart; t <= scroll + visibleDur + minor; t = Math.round((t + minor) * 1e6) / 1e6) {
      const x = (t - scroll) * pxPerSec;
      if (x < 0 || x > width) continue;
      ctx.beginPath();
      ctx.moveTo(x, RULER_H);
      ctx.lineTo(x, RULER_H * 0.45);
      ctx.stroke();
    }
    // Major ticks + labels (±50px slack so edge labels still render)
    const majorStart = Math.floor(scroll / major) * major;
    ctx.lineWidth = 1;
    ctx.font = `9px "JetBrains Mono", "Courier New", monospace`;
    ctx.textBaseline = 'top';
    for (let t = majorStart; t <= scroll + visibleDur + major; t = Math.round((t + major) * 1e6) / 1e6) {
      const x = (t - scroll) * pxPerSec;
      if (x < -50 || x > width + 50) continue;
      ctx.strokeStyle = '#4a4f6a';
      ctx.beginPath();
      ctx.moveTo(x, RULER_H);
      ctx.lineTo(x, 0);
      ctx.stroke();
      if (x >= 2 && x < width - 2) {
        ctx.fillStyle = '#6b7280';
        ctx.fillText(formatTime(t), x + 3, 2);
      }
    }
    // --- Waveform area (below the ruler) ---
    const waveTop = RULER_H + 1;
    const waveH = height - waveTop;
    // Deleted ranges: faint red wash, not interactive here.
    for (const range of deletedRanges) {
      const x1 = (range.start - scroll) * pxPerSec;
      const x2 = (range.end - scroll) * pxPerSec;
      ctx.fillStyle = 'rgba(239, 68, 68, 0.15)';
      ctx.fillRect(x1, waveTop, x2 - x1, waveH);
    }
    // Draw cut ranges (red overlays)
    for (const range of cutRanges) {
      const x1 = (range.start - scroll) * pxPerSec;
      const x2 = (range.end - scroll) * pxPerSec;
      const isSelected = selectedZone?.type === 'cut' && selectedZone.id === range.id;
      ctx.fillStyle = isSelected ? 'rgba(239, 68, 68, 0.5)' : 'rgba(239, 68, 68, 0.3)';
      ctx.fillRect(x1, waveTop, x2 - x1, waveH);
      if (isSelected) {
        ctx.strokeStyle = '#ef4444';
        ctx.lineWidth = 2;
        ctx.strokeRect(x1, waveTop, x2 - x1, waveH);
        // Draw resize handles at the vertical midpoint of each edge
        ctx.fillStyle = '#ef4444';
        ctx.beginPath();
        ctx.arc(x1, waveTop + waveH / 2, 4, 0, 2 * Math.PI);
        ctx.fill();
        ctx.beginPath();
        ctx.arc(x2, waveTop + waveH / 2, 4, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    // Draw mute ranges (blue overlays)
    for (const range of muteRanges) {
      const x1 = (range.start - scroll) * pxPerSec;
      const x2 = (range.end - scroll) * pxPerSec;
      const isSelected = selectedZone?.type === 'mute' && selectedZone.id === range.id;
      ctx.fillStyle = isSelected ? 'rgba(59, 130, 246, 0.5)' : 'rgba(59, 130, 246, 0.3)';
      ctx.fillRect(x1, waveTop, x2 - x1, waveH);
      if (isSelected) {
        ctx.strokeStyle = '#3b82f6';
        ctx.lineWidth = 2;
        ctx.strokeRect(x1, waveTop, x2 - x1, waveH);
        // Draw resize handles
        ctx.fillStyle = '#3b82f6';
        ctx.beginPath();
        ctx.arc(x1, waveTop + waveH / 2, 4, 0, 2 * Math.PI);
        ctx.fill();
        ctx.beginPath();
        ctx.arc(x2, waveTop + waveH / 2, 4, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    // Draw in-progress selection overlay (when in cut/mute mode)
    if ((cutMode || muteMode) && selectionStart !== null && selectionEnd !== null) {
      const x1 = (Math.min(selectionStart, selectionEnd) - scroll) * pxPerSec;
      const x2 = (Math.max(selectionStart, selectionEnd) - scroll) * pxPerSec;
      ctx.fillStyle = cutMode ? 'rgba(239, 68, 68, 0.5)' : 'rgba(59, 130, 246, 0.5)';
      ctx.fillRect(x1, waveTop, x2 - x1, waveH);
      // Add border
      ctx.strokeStyle = cutMode ? '#ef4444' : '#3b82f6';
      ctx.lineWidth = 2;
      ctx.strokeRect(x1, waveTop, x2 - x1, waveH);
    }
    // Min/max envelope: one vertical line per CSS pixel column.
    const mid = waveTop + waveH / 2;
    ctx.beginPath();
    ctx.strokeStyle = '#4a4d5e';
    ctx.lineWidth = 1;
    for (let x = 0; x < width; x++) {
      const tStart = scroll + x / pxPerSec;
      const tEnd = scroll + (x + 1) / pxPerSec;
      const sStart = Math.floor(tStart * sampleRate);
      const sEnd = Math.min(Math.ceil(tEnd * sampleRate), channelData.length);
      if (sStart >= channelData.length) break;
      let min = 0, max = 0;
      for (let i = sStart; i < sEnd; i++) {
        if (channelData[i] < min) min = channelData[i];
        if (channelData[i] > max) max = channelData[i];
      }
      const amp = (waveH / 2) * 0.9;
      ctx.moveTo(x, mid + min * amp);
      ctx.lineTo(x, mid + max * amp);
    }
    ctx.stroke();
  }, [deletedRanges, cutRanges, muteRanges, selectionStart, selectionEnd, cutMode, muteMode, selectedZone]);

  // Keep the ref in sync with the latest drawStaticWaveform closure
  useEffect(() => {
    drawStaticWaveformRef.current = drawStaticWaveform;
  }, [drawStaticWaveform]);

  // Redraw the static layer whenever anything it depends on changes
  useEffect(() => {
    drawStaticWaveform();
  }, [drawStaticWaveform]);

  // Lightweight RAF loop for playhead only -- reads video.currentTime directly,
  // never triggers React re-renders
  useEffect(() => {
    const headCanvas = headCanvasRef.current;
    const waveCanvas = waveCanvasRef.current;
    if (!headCanvas || !waveCanvas) return;
    const tick = () => {
      const ctx = headCanvas.getContext('2d');
      if (!ctx) { rafRef.current = requestAnimationFrame(tick); return; }
      const buffer = audioBufferRef.current;
      const video = document.querySelector('video') as HTMLVideoElement | null;
      const dur = buffer?.duration ?? 0;
      const dpr = window.devicePixelRatio || 1;
      const rect = headCanvas.getBoundingClientRect();
      // Keep the playhead canvas the same backing size as the static canvas.
      if (headCanvas.width !== waveCanvas.width || headCanvas.height !== waveCanvas.height) {
        headCanvas.width = rect.width * dpr;
        headCanvas.height = rect.height * dpr;
      }
      ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
      const width = rect.width;
      const height = rect.height;
      ctx.clearRect(0, 0, width, height);
      if (dur > 0 && video) {
        const pxPerSec = (width * zoomRef.current) / dur;
        let px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
        // If the playhead is off-screen (e.g. after a seek from the transcript),
        // scroll so it's centered and redraw the static waveform layer.
        if (px < 0 || px > width) {
          const visibleSecs = width / pxPerSec;
          const maxScroll = Math.max(0, dur - visibleSecs);
          scrollSecsRef.current = Math.max(0, Math.min(maxScroll, video.currentTime - visibleSecs / 2));
          drawStaticWaveformRef.current();
          px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
        }
        ctx.beginPath();
        ctx.strokeStyle = '#6366f1';
        ctx.lineWidth = 2;
        ctx.moveTo(px, 0);
        ctx.lineTo(px, height);
        ctx.stroke();
      }
      rafRef.current = requestAnimationFrame(tick);
    };
    rafRef.current = requestAnimationFrame(tick);
    return () => cancelAnimationFrame(rafRef.current);
  }, [videoUrl]);

  // Redraw on container resize (canvas backing store must track CSS size).
  useEffect(() => {
    const observer = new ResizeObserver(() => {
      drawStaticWaveform();
    });
    if (containerRef.current) observer.observe(containerRef.current);
    return () => observer.disconnect();
  }, [drawStaticWaveform]);

  // Wheel: plain scroll pans horizontally; Ctrl/Cmd+scroll zooms around cursor.
  const handleWheel = useCallback((e: React.WheelEvent) => {
    e.preventDefault();
    const buffer = audioBufferRef.current;
    const canvas = waveCanvasRef.current;
    if (!buffer || !canvas) return;
    const dur = buffer.duration;
    const width = canvas.getBoundingClientRect().width;
    if (e.ctrlKey || e.metaKey) {
      // Zoom around the cursor position: keep the time under the cursor fixed.
      const mouseX = e.clientX - canvas.getBoundingClientRect().left;
      const pxPerSecBefore = (width * zoomRef.current) / dur;
      const timeCursor = scrollSecsRef.current + mouseX / pxPerSecBefore;
      const factor = e.deltaY < 0 ? 1.25 : 1 / 1.25;
      zoomRef.current = Math.max(1, Math.min(100, zoomRef.current * factor));
      const pxPerSecAfter = (width * zoomRef.current) / dur;
      scrollSecsRef.current = timeCursor - mouseX / pxPerSecAfter;
    } else {
      // Scroll horizontally (either wheel axis works)
      const pxPerSec = (width * zoomRef.current) / dur;
      scrollSecsRef.current += (e.deltaY || e.deltaX) / pxPerSec * 1.5;
    }
    // Clamp scroll so the view never runs past either end
    const pxPerSec = (width * zoomRef.current) / dur;
    const maxScroll = Math.max(0, dur - width / pxPerSec);
    scrollSecsRef.current = Math.max(0, Math.min(scrollSecsRef.current, maxScroll));
    drawStaticWaveform();
  }, [drawStaticWaveform]);

  // Seek the <video> element to the time under the given client x coordinate.
  const seekToClientX = useCallback((clientX: number) => {
    const buffer = audioBufferRef.current;
    const canvas = headCanvasRef.current;
    if (!canvas || !buffer) return;
    const rect = canvas.getBoundingClientRect();
    const x = clientX - rect.left;
    const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
    const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
    setCurrentTime(newTime);
    const video = document.querySelector('video') as HTMLVideoElement | null;
    if (video) video.currentTime = newTime;
  }, [setCurrentTime]);

  // Convert a client x coordinate to a timeline time, clamped to [0, duration].
  const clientXToTime = useCallback((clientX: number): number => {
    const buffer = audioBufferRef.current;
    const canvas = headCanvasRef.current;
    if (!canvas || !buffer) return 0;
    const rect = canvas.getBoundingClientRect();
    const x = clientX - rect.left;
    const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
    return Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
  }, []);

  // Hit-test the cut/mute zones at a client position. Returns the zone plus
  // which part was hit ('start'/'end' edge handle or 'move' body), or null.
  // forHover=true only reports edge hits on the already-selected zone.
  const getZoneAtPosition = useCallback((clientX: number, clientY: number, forHover: boolean = false) => {
    const buffer = audioBufferRef.current;
    const canvas = waveCanvasRef.current;
    if (!canvas || !buffer) return null;
    const rect = canvas.getBoundingClientRect();
    const x = clientX - rect.left;
    const y = clientY - rect.top;
    const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
    const scroll = scrollSecsRef.current;
    const waveTop = RULER_H + 1;
    // FIX: use CSS-pixel height (rect.height), not canvas.height — the backing
    // store is rect.height * devicePixelRatio, which made the y bounds check
    // and handle-circle hit test wrong on high-DPI displays.
    const waveH = rect.height - waveTop;
    // Check if click is in waveform area (below the ruler)
    if (y < waveTop || y > waveTop + waveH) return null;
    const clickTime = scroll + x / pxPerSec;
    const handleSize = forHover ? 6 : 8; // Smaller hit area for hover, larger for click
    // Check cut ranges
    for (const range of cutRanges) {
      const rangeX1 = (range.start - scroll) * pxPerSec;
      const rangeX2 = (range.end - scroll) * pxPerSec;
      const isSelected = selectedZone?.type === 'cut' && selectedZone.id === range.id;
      if (forHover && isSelected) {
        // For hover on selected zones, check edges
        if (Math.abs(x - rangeX1) <= handleSize) {
          return { type: 'cut' as const, id: range.id, edge: 'start' as const };
        }
        if (Math.abs(x - rangeX2) <= handleSize) {
          return { type: 'cut' as const, id: range.id, edge: 'end' as const };
        }
      } else if (!forHover) {
        // For click detection, check handles and body
        if (isSelected) {
          // For selected zones, allow clicking on edges for resizing
          if (Math.abs(x - rangeX1) <= handleSize) {
            return { type: 'cut' as const, id: range.id, edge: 'start' as const };
          }
          if (Math.abs(x - rangeX2) <= handleSize) {
            return { type: 'cut' as const, id: range.id, edge: 'end' as const };
          }
        } else {
          // For unselected zones, check the small handle circles drawn at the
          // vertical midpoint of each edge
          if (Math.abs(x - rangeX1) <= handleSize && Math.abs(y - (waveTop + waveH / 2)) <= handleSize) {
            return { type: 'cut' as const, id: range.id, edge: 'start' as const };
          }
          if (Math.abs(x - rangeX2) <= handleSize && Math.abs(y - (waveTop + waveH / 2)) <= handleSize) {
            return { type: 'cut' as const, id: range.id, edge: 'end' as const };
          }
        }
        // Check range body
        if (x >= rangeX1 && x <= rangeX2) {
          return { type: 'cut' as const, id: range.id, edge: 'move' as const };
        }
      }
    }
    // Check mute ranges (same logic as cut ranges)
    for (const range of muteRanges) {
      const rangeX1 = (range.start - scroll) * pxPerSec;
      const rangeX2 = (range.end - scroll) * pxPerSec;
      const isSelected = selectedZone?.type === 'mute' && selectedZone.id === range.id;
      if (forHover && isSelected) {
        // For hover on selected zones, check edges
        if (Math.abs(x - rangeX1) <= handleSize) {
          return { type: 'mute' as const, id: range.id, edge: 'start' as const };
        }
        if (Math.abs(x - rangeX2) <= handleSize) {
          return { type: 'mute' as const, id: range.id, edge: 'end' as const };
        }
      } else if (!forHover) {
        // For click detection, check handles and body
        if (isSelected) {
          // For selected zones, allow clicking on edges for resizing
          if (Math.abs(x - rangeX1) <= handleSize) {
            return { type: 'mute' as const, id: range.id, edge: 'start' as const };
          }
          if (Math.abs(x - rangeX2) <= handleSize) {
            return { type: 'mute' as const, id: range.id, edge: 'end' as const };
          }
        } else {
          // For unselected zones, check small handle circles
          if (Math.abs(x - rangeX1) <= handleSize && Math.abs(y - (waveTop + waveH / 2)) <= handleSize) {
            return { type: 'mute' as const, id: range.id, edge: 'start' as const };
          }
          if (Math.abs(x - rangeX2) <= handleSize && Math.abs(y - (waveTop + waveH / 2)) <= handleSize) {
            return { type: 'mute' as const, id: range.id, edge: 'end' as const };
          }
        }
        // Check range body
        if (x >= rangeX1 && x <= rangeX2) {
          return { type: 'mute' as const, id: range.id, edge: 'move' as const };
        }
      }
    }
    return null;
  }, [cutRanges, muteRanges, selectedZone]);

  // Hover feedback: show a resize cursor over the selected zone's edges.
  const handleMouseMove = useCallback((e: React.MouseEvent<HTMLCanvasElement>) => {
    if (isDragging) return; // Don't change cursor while dragging
    const zoneHit = getZoneAtPosition(e.clientX, e.clientY, true);
    if (zoneHit && selectedZone && zoneHit.id === selectedZone.id) {
      if (zoneHit.edge === 'start' || zoneHit.edge === 'end') {
        setHoverCursor('ew-resize');
        return;
      }
    }
    setHoverCursor('crosshair');
  }, [isDragging, getZoneAtPosition, selectedZone]);

  // Mouse down dispatch: zone select/move/resize > range selection > seek.
  const handleMouseDown = useCallback(
    (e: React.MouseEvent<HTMLCanvasElement>) => {
      e.preventDefault();
      // Check if clicking on a zone
      const zoneHit = getZoneAtPosition(e.clientX, e.clientY);
      if (zoneHit) {
        if (zoneHit.edge === 'move') {
          setSelectedZone({ type: zoneHit.type, id: zoneHit.id });
        } else {
          setSelectedZone({ type: zoneHit.type, id: zoneHit.id });
          setEditingZone(zoneHit);
          editingZoneRef.current = zoneHit;
        }
        isDraggingRef.current = true;
        setIsDragging(true);
        const startTime = clientXToTime(e.clientX);
        const originalRange = zoneHit.type === 'cut'
          ? cutRanges.find(r => r.id === zoneHit.id)
          : muteRanges.find(r => r.id === zoneHit.id);
        if (!originalRange) return;
        const onMove = (ev: MouseEvent) => {
          if (!isDraggingRef.current || !editingZoneRef.current) return;
          const currentTime = clientXToTime(ev.clientX);
          const delta = currentTime - startTime;
          const minZoneDuration = 0.05;
          let newStart = originalRange.start;
          let newEnd = originalRange.end;
          if (editingZoneRef.current.edge === 'start') {
            newStart = Math.max(0, Math.min(originalRange.end - minZoneDuration, originalRange.start + delta));
            newEnd = originalRange.end; // Keep end fixed when dragging start
          } else if (editingZoneRef.current.edge === 'end') {
            newStart = originalRange.start; // Keep start fixed when dragging end
            newEnd = Math.min(duration, Math.max(newStart + minZoneDuration, originalRange.end + delta));
          } else if (editingZoneRef.current.edge === 'move') {
            const zoneDuration = originalRange.end - originalRange.start;
            const maxStart = Math.max(0, duration - zoneDuration);
            newStart = Math.max(0, Math.min(maxStart, originalRange.start + delta));
            newEnd = newStart + zoneDuration;
          }
          // Ensure valid range
          if (newStart < newEnd) {
            if (editingZoneRef.current.type === 'cut') {
              updateCutRange(editingZoneRef.current.id, newStart, newEnd);
            } else {
              updateMuteRange(editingZoneRef.current.id, newStart, newEnd);
            }
          }
        };
        const onUp = () => {
          isDraggingRef.current = false;
          setIsDragging(false);
          setEditingZone(null);
          editingZoneRef.current = null;
          window.removeEventListener('mousemove', onMove);
          window.removeEventListener('mouseup', onUp);
        };
        window.addEventListener('mousemove', onMove);
        window.addEventListener('mouseup', onUp);
        return;
      }
      // Clear selection if clicking elsewhere
      setSelectedZone(null);
      setEditingZone(null);
      if (cutMode || muteMode) {
        // Range selection mode
        const startTime = clientXToTime(e.clientX);
        selectionStartRef.current = startTime;
        // FIX: track the drag end in a local variable. The old code read the
        // `selectionEnd` state in onUp, but that closure captured the value
        // from the render that attached this handler (still null), so the
        // selected range was never committed.
        let latestEnd = startTime;
        setSelectionStart(startTime);
        setSelectionEnd(startTime);
        isDraggingRef.current = true;
        setIsDragging(true);
        const onMove = (ev: MouseEvent) => {
          if (!isDraggingRef.current) return;
          latestEnd = clientXToTime(ev.clientX);
          setSelectionEnd(latestEnd); // state copy drives the overlay drawing
        };
        const onUp = () => {
          isDraggingRef.current = false;
          setIsDragging(false);
          if (selectionStartRef.current !== null) {
            const start = Math.min(selectionStartRef.current, latestEnd);
            const end = Math.max(selectionStartRef.current, latestEnd);
            // Ignore zero-width drags (a plain click in cut/mute mode)
            if (end > start) {
              if (cutMode) {
                addCutRange(start, end);
              } else if (muteMode) {
                addMuteRange(start, end);
              }
            }
          }
          // Reset selection
          selectionStartRef.current = null;
          setSelectionStart(null);
          setSelectionEnd(null);
          window.removeEventListener('mousemove', onMove);
          window.removeEventListener('mouseup', onUp);
        };
        window.addEventListener('mousemove', onMove);
        window.addEventListener('mouseup', onUp);
      } else {
        // Normal seek mode: click seeks, drag scrubs
        isDraggingRef.current = true;
        setIsDragging(true);
        seekToClientX(e.clientX);
        const onMove = (ev: MouseEvent) => {
          if (!isDraggingRef.current) return;
          seekToClientX(ev.clientX);
        };
        const onUp = () => {
          isDraggingRef.current = false;
          setIsDragging(false);
          window.removeEventListener('mousemove', onMove);
          window.removeEventListener('mouseup', onUp);
        };
        window.addEventListener('mousemove', onMove);
        window.addEventListener('mouseup', onUp);
      }
    },
    // Include everything read by the drag closures so they never go stale.
    [cutMode, muteMode, clientXToTime, seekToClientX, addCutRange, addMuteRange,
     getZoneAtPosition, cutRanges, muteRanges, duration, updateCutRange, updateMuteRange],
  );

  // Handle keyboard shortcuts for zone editing (Escape deselects,
  // Delete/Backspace removes the selected zone).
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      const target = e.target as HTMLElement | null;
      // Never hijack keys while the user is typing in a form control.
      if (target && (target.tagName === 'INPUT' || target.tagName === 'TEXTAREA' || target.tagName === 'SELECT')) {
        return;
      }
      if (e.key === 'Escape') {
        setSelectedZone(null);
        setEditingZone(null);
        editingZoneRef.current = null;
      } else if (e.key === 'Delete' || e.key === 'Backspace') {
        if (selectedZone) {
          e.preventDefault();
          e.stopPropagation();
          e.stopImmediatePropagation();
          if (selectedZone.type === 'cut') {
            removeCutRange(selectedZone.id);
          } else {
            removeMuteRange(selectedZone.id);
          }
          setSelectedZone(null);
          setEditingZone(null);
          editingZoneRef.current = null;
        }
      }
    };
    // Capture phase ensures zone delete runs before app-level bubble shortcuts.
    window.addEventListener('keydown', handleKeyDown, { capture: true });
    return () => window.removeEventListener('keydown', handleKeyDown, { capture: true });
  }, [selectedZone, removeCutRange, removeMuteRange]);

  if (!videoUrl) {
    return (
      <div className="w-full h-full flex items-center justify-center text-editor-text-muted text-xs">
        Load a video to see the waveform
      </div>
    );
  }
  return (
    <div ref={containerRef} className="w-full h-full flex flex-col">
      <div className="flex items-center justify-between px-3 py-1 shrink-0">
        <span className="text-[10px] text-editor-text-muted font-medium uppercase tracking-wider">
          Timeline
        </span>
        <span className="text-[10px] text-editor-text-muted">
          Scroll · Ctrl+Scroll to zoom
        </span>
      </div>
      {audioError ? (
        <div className="flex-1 flex items-center justify-center gap-2 text-editor-text-muted text-xs">
          <AlertTriangle className="w-4 h-4 text-yellow-500" />
          <span>{audioError}</span>
        </div>
      ) : (
        <div className="flex-1 relative">
          {/* Static layer below, interactive playhead layer on top */}
          <canvas ref={waveCanvasRef} className="absolute inset-0 w-full h-full" />
          <canvas
            ref={headCanvasRef}
            className="absolute inset-0 w-full h-full"
            style={{ cursor: isDragging ? 'grabbing' : hoverCursor }}
            onMouseDown={handleMouseDown}
            onMouseMove={handleMouseMove}
            onWheel={handleWheel}
          />
        </div>
      )}
    </div>
  );
}