// TalkEdit/frontend/src/components/WaveformTimeline.tsx

import { useRef, useEffect, useCallback, useState } from 'react';
import { useEditorStore } from '../store/editorStore';
import { AlertTriangle } from 'lucide-react';

const RULER_H = 20; // px reserved at top of canvas for the time ruler
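
// Format a ruler label: "m:ss.t" at or above one minute, otherwise seconds
// with one decimal place (e.g. "7.5s").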
function formatTime(secs: number): string {
  const m = Math.floor(secs / 60);
  const s = secs % 60;
  if (m > 0) return `${m}:${String(Math.floor(s)).padStart(2, '0')}.${Math.floor((s % 1) * 10)}`;
  return `${s.toFixed(1)}s`;
}
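
// Pick "nice" tick intervals for the current zoom level. The major interval is
// the smallest nice value whose ticks sit at least 70px apart; the minor
// interval is the largest nice value below it whose ticks stay at least 6px apart.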
function pickInterval(pxPerSec: number): { major: number; minor: number } {
  const NICE = [0.05, 0.1, 0.25, 0.5, 1, 2, 5, 10, 15, 30, 60, 120, 300, 600];
  let major = NICE[NICE.length - 1];
  for (const n of NICE) {
    if (n * pxPerSec >= 70) { major = n; break; }
  }
  let minor = major;
  for (const n of NICE) {
    if (n * pxPerSec >= 6 && n < major) { minor = n; }
  }
  return { major, minor };
}
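
// Two stacked canvases: a static layer (ruler, deleted-range tint, waveform)
// redrawn only on zoom/scroll/resize/edit, and a lightweight overlay for the
// playhead redrawn every animation frame. Zoom and scroll offsets live in refs
// so the RAF loop can read and update them without triggering React re-renders.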
export default function WaveformTimeline() {
  const waveCanvasRef = useRef<HTMLCanvasElement>(null);
  const headCanvasRef = useRef<HTMLCanvasElement>(null);
  const containerRef = useRef<HTMLDivElement>(null);
  const [audioError, setAudioError] = useState<string | null>(null);

  const videoUrl = useEditorStore((s) => s.videoUrl);
  const videoPath = useEditorStore((s) => s.videoPath);
  const backendUrl = useEditorStore((s) => s.backendUrl);
  const deletedRanges = useEditorStore((s) => s.deletedRanges);
  const setCurrentTime = useEditorStore((s) => s.setCurrentTime);

  const audioContextRef = useRef<AudioContext | null>(null);
  const audioBufferRef = useRef<AudioBuffer | null>(null);
  const zoomRef = useRef(1); // 1 = show all, >1 = zoomed in
  const scrollSecsRef = useRef(0); // seconds scrolled from left
  const rafRef = useRef(0);
  // Ref so the RAF loop and the async audio load can call the latest
  // drawStaticWaveform without a stale closure
  const drawStaticWaveformRef = useRef<() => void>(() => {});
  const isDraggingRef = useRef(false);
  const [isDragging, setIsDragging] = useState(false);
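
  // Fetch the audio for the loaded video from the backend and decode it with
  // the Web Audio API; the decoded buffer is the source for the waveform.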
  useEffect(() => {
    if (!videoUrl || !videoPath) return;
    setAudioError(null);

    const loadAudio = async () => {
      try {
        const waveformUrl = `${backendUrl}/audio/waveform?path=${encodeURIComponent(videoPath!)}`;
        console.log('[WaveformTimeline] backendUrl:', backendUrl, '| videoPath:', videoPath);
        console.log('[WaveformTimeline] Fetching:', waveformUrl);

        const ctx = new AudioContext();
        audioContextRef.current = ctx;

        const response = await fetch(waveformUrl);
        if (!response.ok) {
          const body = await response.text().catch(() => '');
          console.error(
            `[WaveformTimeline] Fetch failed — HTTP ${response.status} ${response.statusText}`,
            { url: waveformUrl, body }
          );
          throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }

        const contentType = response.headers.get('content-type') ?? 'unknown';
        const contentLength = response.headers.get('content-length');
        console.log(
          `[WaveformTimeline] Fetch OK — content-type: ${contentType}, size: ${contentLength ?? 'unknown'} bytes`
        );

        const arrayBuffer = await response.arrayBuffer();
        console.log(`[WaveformTimeline] ArrayBuffer size: ${arrayBuffer.byteLength} bytes`);
        if (arrayBuffer.byteLength === 0) {
          throw new Error('Server returned an empty file');
        }

        let audioBuffer: AudioBuffer;
        try {
          audioBuffer = await ctx.decodeAudioData(arrayBuffer);
        } catch (decodeErr) {
          console.error(
            '[WaveformTimeline] decodeAudioData failed — browser cannot decode this format.',
            { contentType, byteLength: arrayBuffer.byteLength, videoPath, error: decodeErr }
          );
          throw new Error(
            `Browser could not decode audio (${contentType}). ` +
              `For best compatibility use MP4/AAC or WebM/Opus. Raw error: ${decodeErr}`
          );
        }

        console.log(
          `[WaveformTimeline] Decoded OK — duration: ${audioBuffer.duration.toFixed(2)}s, ` +
            `channels: ${audioBuffer.numberOfChannels}, sampleRate: ${audioBuffer.sampleRate}Hz`
        );
        audioBufferRef.current = audioBuffer;
        // Draw through the ref so the freshest closure is used
        drawStaticWaveformRef.current();
      } catch (err) {
        console.error('[WaveformTimeline] Waveform load failed:', err);
        const waveformUrl = `${backendUrl}/audio/waveform?path=${encodeURIComponent(videoPath ?? '')}`;
        setAudioError(
          `Waveform unavailable — ${
            err instanceof Error ? err.message : 'audio could not be decoded'
          } [URL: ${waveformUrl}]`
        );
      }
    };
    loadAudio();

    return () => {
      audioContextRef.current?.close();
    };
  }, [videoUrl, videoPath, backendUrl]);
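
  // Render the static layer at devicePixelRatio resolution: time ruler, red
  // tint over deleted ranges, then one vertical min/max line per pixel column.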
  const drawStaticWaveform = useCallback(() => {
    const canvas = waveCanvasRef.current;
    const buffer = audioBufferRef.current;
    if (!canvas || !buffer) return;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Size the backing store for the device pixel ratio; resizing also clears
    // the canvas and resets its transform.
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);
    const width = rect.width;
    const height = rect.height;

    const dur = buffer.duration;
    const zoom = zoomRef.current;
    const scroll = scrollSecsRef.current;
    const pxPerSec = (width * zoom) / dur;
    const sampleRate = buffer.sampleRate;
    const channelData = buffer.getChannelData(0);

    ctx.clearRect(0, 0, width, height);

    // --- Ruler background ---
    ctx.fillStyle = '#13141f';
    ctx.fillRect(0, 0, width, RULER_H);
    // Separator line
    ctx.strokeStyle = '#2a2d3e';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, RULER_H);
    ctx.lineTo(width, RULER_H);
    ctx.stroke();

    // --- Ruler ticks & labels ---
    const { major, minor } = pickInterval(pxPerSec);
    const visibleDur = width / pxPerSec;

    // Minor ticks; rounding to 1µs stops float drift from accumulating
    const minorStart = Math.floor(scroll / minor) * minor;
    ctx.strokeStyle = '#3a3d52';
    ctx.lineWidth = 1;
    for (let t = minorStart; t <= scroll + visibleDur + minor; t = Math.round((t + minor) * 1e6) / 1e6) {
      const x = (t - scroll) * pxPerSec;
      if (x < 0 || x > width) continue;
      ctx.beginPath();
      ctx.moveTo(x, RULER_H);
      ctx.lineTo(x, RULER_H * 0.45);
      ctx.stroke();
    }

    // Major ticks + labels
    const majorStart = Math.floor(scroll / major) * major;
    ctx.lineWidth = 1;
    ctx.font = `9px "JetBrains Mono", "Courier New", monospace`;
    ctx.textBaseline = 'top';
    for (let t = majorStart; t <= scroll + visibleDur + major; t = Math.round((t + major) * 1e6) / 1e6) {
      const x = (t - scroll) * pxPerSec;
      if (x < -50 || x > width + 50) continue;
      ctx.strokeStyle = '#4a4f6a';
      ctx.beginPath();
      ctx.moveTo(x, RULER_H);
      ctx.lineTo(x, 0);
      ctx.stroke();
      if (x >= 2 && x < width - 2) {
        ctx.fillStyle = '#6b7280';
        ctx.fillText(formatTime(t), x + 3, 2);
      }
    }

    // --- Waveform ---
    const waveTop = RULER_H + 1;
    const waveH = height - waveTop;

    // Tint deleted ranges before drawing the waveform on top
    for (const range of deletedRanges) {
      const x1 = (range.start - scroll) * pxPerSec;
      const x2 = (range.end - scroll) * pxPerSec;
      ctx.fillStyle = 'rgba(239, 68, 68, 0.15)';
      ctx.fillRect(x1, waveTop, x2 - x1, waveH);
    }

    // One vertical min/max line per pixel column
    const mid = waveTop + waveH / 2;
    ctx.beginPath();
    ctx.strokeStyle = '#4a4d5e';
    ctx.lineWidth = 1;
    for (let x = 0; x < width; x++) {
      const tStart = scroll + x / pxPerSec;
      const tEnd = scroll + (x + 1) / pxPerSec;
      const sStart = Math.floor(tStart * sampleRate);
      const sEnd = Math.min(Math.ceil(tEnd * sampleRate), channelData.length);
      if (sStart >= channelData.length) break;
      let min = 0, max = 0;
      for (let i = sStart; i < sEnd; i++) {
        if (channelData[i] < min) min = channelData[i];
        if (channelData[i] > max) max = channelData[i];
      }
      const amp = (waveH / 2) * 0.9;
      ctx.moveTo(x, mid + min * amp);
      ctx.lineTo(x, mid + max * amp);
    }
    ctx.stroke();
  }, [deletedRanges]);

  // Keep the ref in sync with the latest drawStaticWaveform closure and
  // redraw the static layer when it changes (e.g. when deletedRanges change)
  useEffect(() => {
    drawStaticWaveformRef.current = drawStaticWaveform;
    drawStaticWaveform();
  }, [drawStaticWaveform]);

  // Lightweight RAF loop for the playhead only -- reads video.currentTime
  // directly and never triggers a React re-render
  useEffect(() => {
    const headCanvas = headCanvasRef.current;
    const waveCanvas = waveCanvasRef.current;
    if (!headCanvas || !waveCanvas) return;

    const tick = () => {
      const ctx = headCanvas.getContext('2d');
      if (!ctx) { rafRef.current = requestAnimationFrame(tick); return; }
      const buffer = audioBufferRef.current;
      const video = document.querySelector('video') as HTMLVideoElement | null;
      const dur = buffer?.duration ?? 0;
      const dpr = window.devicePixelRatio || 1;
      const rect = headCanvas.getBoundingClientRect();
      // Keep the overlay's backing store in sync with the waveform canvas
      if (headCanvas.width !== waveCanvas.width || headCanvas.height !== waveCanvas.height) {
        headCanvas.width = rect.width * dpr;
        headCanvas.height = rect.height * dpr;
      }
      ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
      const width = rect.width;
      const height = rect.height;
      ctx.clearRect(0, 0, width, height);

      if (dur > 0 && video) {
        const pxPerSec = (width * zoomRef.current) / dur;
        let px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
        // If the playhead is off-screen (e.g. after a seek from the transcript),
        // scroll so it's centered and redraw the static waveform layer.
        if (px < 0 || px > width) {
          const visibleSecs = width / pxPerSec;
          const maxScroll = Math.max(0, dur - visibleSecs);
          scrollSecsRef.current = Math.max(0, Math.min(maxScroll, video.currentTime - visibleSecs / 2));
          drawStaticWaveformRef.current();
          px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
        }
        ctx.beginPath();
        ctx.strokeStyle = '#6366f1';
        ctx.lineWidth = 2;
        ctx.moveTo(px, 0);
        ctx.lineTo(px, height);
        ctx.stroke();
      }
      rafRef.current = requestAnimationFrame(tick);
    };

    rafRef.current = requestAnimationFrame(tick);
    return () => cancelAnimationFrame(rafRef.current);
  }, [videoUrl]);
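
  // Redraw the static layer whenever the container is resized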
  useEffect(() => {
    const observer = new ResizeObserver(() => {
      drawStaticWaveform();
    });
    if (containerRef.current) observer.observe(containerRef.current);
    return () => observer.disconnect();
  }, [drawStaticWaveform]);
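
  // Wheel scrolls horizontally; Ctrl/Cmd+wheel zooms around the cursor by
  // solving for the scroll offset that keeps the time under the pointer fixed.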
  const handleWheel = useCallback((e: WheelEvent) => {
    e.preventDefault();
    const buffer = audioBufferRef.current;
    const canvas = waveCanvasRef.current;
    if (!buffer || !canvas) return;
    const dur = buffer.duration;
    const width = canvas.getBoundingClientRect().width;

    if (e.ctrlKey || e.metaKey) {
      // Zoom around the cursor position
      const mouseX = e.clientX - canvas.getBoundingClientRect().left;
      const pxPerSecBefore = (width * zoomRef.current) / dur;
      const timeCursor = scrollSecsRef.current + mouseX / pxPerSecBefore;
      const factor = e.deltaY < 0 ? 1.25 : 1 / 1.25;
      zoomRef.current = Math.max(1, Math.min(100, zoomRef.current * factor));
      const pxPerSecAfter = (width * zoomRef.current) / dur;
      scrollSecsRef.current = timeCursor - mouseX / pxPerSecAfter;
    } else {
      // Scroll horizontally
      const pxPerSec = (width * zoomRef.current) / dur;
      scrollSecsRef.current += ((e.deltaY || e.deltaX) / pxPerSec) * 1.5;
    }

    // Clamp scroll
    const pxPerSec = (width * zoomRef.current) / dur;
    const maxScroll = Math.max(0, dur - width / pxPerSec);
    scrollSecsRef.current = Math.max(0, Math.min(scrollSecsRef.current, maxScroll));
    drawStaticWaveform();
  }, [drawStaticWaveform]);

  // React 17+ registers JSX onWheel handlers as passive, so e.preventDefault()
  // there is silently ignored and the page scrolls. Attach a native,
  // non-passive wheel listener instead; audioError/videoUrl are deps because
  // the canvas mounts and unmounts with them.
  useEffect(() => {
    const canvas = headCanvasRef.current;
    if (!canvas) return;
    canvas.addEventListener('wheel', handleWheel, { passive: false });
    return () => canvas.removeEventListener('wheel', handleWheel);
  }, [handleWheel, audioError, videoUrl]);
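
  // Map a clientX position on the canvas to a timeline time, then seek both
  // the store and the <video> element.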
  const seekToClientX = useCallback((clientX: number) => {
    const buffer = audioBufferRef.current;
    const canvas = headCanvasRef.current;
    if (!canvas || !buffer) return;
    const rect = canvas.getBoundingClientRect();
    const x = clientX - rect.left;
    const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
    const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
    setCurrentTime(newTime);
    const video = document.querySelector('video') as HTMLVideoElement | null;
    if (video) video.currentTime = newTime;
  }, [setCurrentTime]);
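
  // Drag-to-scrub: seek on mousedown, then follow the pointer via window-level
  // listeners so the drag keeps working after the cursor leaves the canvas.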
  const handleMouseDown = useCallback(
    (e: React.MouseEvent<HTMLCanvasElement>) => {
      e.preventDefault();
      isDraggingRef.current = true;
      setIsDragging(true);
      seekToClientX(e.clientX);
      const onMove = (ev: MouseEvent) => {
        if (!isDraggingRef.current) return;
        seekToClientX(ev.clientX);
      };
      const onUp = () => {
        isDraggingRef.current = false;
        setIsDragging(false);
        window.removeEventListener('mousemove', onMove);
        window.removeEventListener('mouseup', onUp);
      };
      window.addEventListener('mousemove', onMove);
      window.addEventListener('mouseup', onUp);
    },
    [seekToClientX],
  );

  if (!videoUrl) {
    return (
      <div className="w-full h-full flex items-center justify-center text-editor-text-muted text-xs">
        Load a video to see the waveform
      </div>
    );
  }

  return (
    <div ref={containerRef} className="w-full h-full flex flex-col">
      <div className="flex items-center justify-between px-3 py-1 shrink-0">
        <span className="text-[10px] text-editor-text-muted font-medium uppercase tracking-wider">
          Timeline
        </span>
        <span className="text-[10px] text-editor-text-muted">
          Scroll · Ctrl+Scroll to zoom
        </span>
      </div>
      {audioError ? (
        <div className="flex-1 flex items-center justify-center gap-2 text-editor-text-muted text-xs">
          <AlertTriangle className="w-4 h-4 text-yellow-500" />
          <span>{audioError}</span>
        </div>
      ) : (
        <div className="flex-1 relative">
          {/* Static layer: ruler + waveform */}
          <canvas ref={waveCanvasRef} className="absolute inset-0 w-full h-full" />
          {/* Interactive overlay: playhead + drag-to-scrub; wheel is attached natively above */}
          <canvas
            ref={headCanvasRef}
            className={`absolute inset-0 w-full h-full ${isDragging ? 'cursor-grabbing' : 'cursor-crosshair'}`}
            onMouseDown={handleMouseDown}
          />
        </div>
      )}
    </div>
  );
}