close sh; able to save/load projects

This commit is contained in:
2026-03-30 18:36:41 -06:00
parent 246d816f84
commit ea3f1d2b23
15 changed files with 44871 additions and 31 deletions

View File

@ -0,0 +1,128 @@
import { useState, useCallback } from 'react';
import { useEditorStore } from '../store/editorStore';
import { Terminal, ChevronDown, ChevronUp, Play, Wifi } from 'lucide-react';
export default function DevPanel() {
const [open, setOpen] = useState(false);
const [pathInput, setPathInput] = useState('');
const [testResult, setTestResult] = useState<string | null>(null);
const [testing, setTesting] = useState(false);
const { backendUrl, videoPath, loadVideo } = useEditorStore();
const handleLoad = useCallback(() => {
const p = pathInput.trim();
if (p) loadVideo(p);
}, [pathInput, loadVideo]);
const testEndpoint = useCallback(async (endpoint: string) => {
setTesting(true);
setTestResult(null);
try {
const url = `${backendUrl}${endpoint}`;
const res = await fetch(url);
const text = res.headers.get('content-type')?.includes('json')
? JSON.stringify(await res.json(), null, 2)
: `${res.status} ${res.statusText} (${res.headers.get('content-type') ?? 'no type'})`;
setTestResult(`${url}\n${text}`);
} catch (e) {
setTestResult(`${e}`);
} finally {
setTesting(false);
}
}, [backendUrl]);
const testWaveform = useCallback(async () => {
const p = pathInput.trim() || videoPath;
if (!p) { setTestResult('No path to test'); return; }
setTesting(true);
setTestResult(null);
try {
const url = `${backendUrl}/audio/waveform?path=${encodeURIComponent(p)}`;
const res = await fetch(url);
if (res.ok) {
const buf = await res.arrayBuffer();
setTestResult(`✓ Waveform OK — ${buf.byteLength} bytes\n${url}`);
} else {
const body = await res.text().catch(() => '');
setTestResult(`✗ HTTP ${res.status}\n${body}`);
}
} catch (e) {
setTestResult(`${e}`);
} finally {
setTesting(false);
}
}, [backendUrl, pathInput, videoPath]);
return (
<div className="fixed bottom-0 right-0 z-50 w-96 font-mono text-[11px]">
{/* Header */}
<button
onClick={() => setOpen(o => !o)}
className="w-full flex items-center justify-between px-3 py-1.5 bg-[#0d0f1a] border-t border-l border-[#2a2d3e] text-[#6b7280] hover:text-white"
>
<span className="flex items-center gap-1.5">
<Terminal className="w-3 h-3" />
DevPanel
<span className="ml-2 text-[#4a4f6a]">{backendUrl}</span>
</span>
{open ? <ChevronDown className="w-3 h-3" /> : <ChevronUp className="w-3 h-3" />}
</button>
{open && (
<div className="bg-[#0d0f1a] border-t border-l border-[#2a2d3e] p-3 space-y-3">
{/* State */}
<div className="space-y-0.5 text-[#4a4f6a]">
<div>backendUrl: <span className="text-[#6366f1]">{backendUrl}</span></div>
<div className="truncate">videoPath: <span className="text-[#6366f1]">{videoPath ?? 'null'}</span></div>
</div>
{/* Load file by path */}
<div className="space-y-1">
<div className="text-[#6b7280] uppercase tracking-wider text-[9px]">Load file</div>
<div className="flex gap-1">
<input
type="text"
value={pathInput}
onChange={e => setPathInput(e.target.value)}
onKeyDown={e => e.key === 'Enter' && handleLoad()}
placeholder={videoPath ?? '/path/to/file.wav'}
className="flex-1 bg-[#13141f] border border-[#2a2d3e] rounded px-2 py-1 text-white placeholder-[#2a2d3e] focus:outline-none focus:border-[#6366f1]"
/>
<button
onClick={handleLoad}
disabled={!pathInput.trim()}
className="px-2 py-1 bg-[#6366f1] hover:bg-[#4f52d4] disabled:opacity-30 rounded text-white"
>
<Play className="w-3 h-3" />
</button>
</div>
</div>
{/* Quick tests */}
<div className="space-y-1">
<div className="text-[#6b7280] uppercase tracking-wider text-[9px]">Test endpoints</div>
<div className="flex flex-wrap gap-1">
<button onClick={() => testEndpoint('/health')} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] rounded text-[#6b7280] hover:text-white flex items-center gap-1">
<Wifi className="w-2.5 h-2.5" />/health
</button>
<button onClick={() => testEndpoint('/audio/capabilities')} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] rounded text-[#6b7280] hover:text-white">
/audio/capabilities
</button>
<button onClick={testWaveform} disabled={testing} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] disabled:opacity-40 rounded text-[#6b7280] hover:text-white">
/audio/waveform
</button>
</div>
</div>
{/* Result */}
{testResult && (
<pre className="bg-[#13141f] border border-[#2a2d3e] rounded p-2 text-[10px] text-[#9ca3af] whitespace-pre-wrap break-all max-h-32 overflow-y-auto">
{testResult}
</pre>
)}
</div>
)}
</div>
);
}

View File

@ -57,6 +57,15 @@ export default function TranscriptEditor() {
const handleWordMouseDown = useCallback(
(index: number, e: React.MouseEvent) => {
e.preventDefault();
// Ctrl+click → seek video to this word's start time
if (e.ctrlKey) {
const word = words[index];
if (word) {
const video = document.querySelector('video') as HTMLVideoElement | null;
if (video) video.currentTime = word.start;
}
return;
}
wasDragging.current = false;
if (e.shiftKey && selectedWordIndices.length > 0) {
const first = selectedWordIndices[0];
@ -70,7 +79,7 @@ export default function TranscriptEditor() {
setSelectedWordIndices([index]);
}
},
[selectedWordIndices, setSelectedWordIndices],
[words, selectedWordIndices, setSelectedWordIndices],
);
const handleWordMouseEnter = useCallback(
@ -135,6 +144,7 @@ export default function TranscriptEditor() {
key={globalIndex}
id={`word-${globalIndex}`}
data-word-index={globalIndex}
title={`${word.start.toFixed(2)}s — Ctrl+click to seek`}
onMouseDown={(e) => handleWordMouseDown(globalIndex, e)}
onMouseEnter={() => handleWordMouseEnter(globalIndex)}
onMouseLeave={() => setHoveredWordIndex(null)}

View File

@ -42,6 +42,10 @@ export default function WaveformTimeline() {
const zoomRef = useRef(1); // 1 = show all, >1 = zoomed in
const scrollSecsRef = useRef(0); // seconds scrolled from left
const rafRef = useRef(0);
// Ref so the RAF loop can call drawStaticWaveform without a stale closure
const drawStaticWaveformRef = useRef<() => void>(() => {});
const isDraggingRef = useRef(false);
const [isDragging, setIsDragging] = useState(false);
useEffect(() => {
if (!videoUrl || !videoPath) return;
@ -226,6 +230,11 @@ export default function WaveformTimeline() {
ctx.stroke();
}, [deletedRanges]);
// Keep the ref in sync with the latest drawStaticWaveform closure
useEffect(() => {
drawStaticWaveformRef.current = drawStaticWaveform;
}, [drawStaticWaveform]);
// Redraw static layer when deletedRanges change
useEffect(() => {
drawStaticWaveform();
@ -260,15 +269,24 @@ export default function WaveformTimeline() {
if (dur > 0 && video) {
const pxPerSec = (width * zoomRef.current) / dur;
const px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
if (px >= 0 && px <= width) {
ctx.beginPath();
ctx.strokeStyle = '#6366f1';
ctx.lineWidth = 2;
ctx.moveTo(px, 0);
ctx.lineTo(px, height);
ctx.stroke();
let px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
// If the playhead is off-screen (e.g. after a seek from the transcript),
// scroll so it's centered and redraw the static waveform layer.
if (px < 0 || px > width) {
const visibleSecs = width / pxPerSec;
const maxScroll = Math.max(0, dur - visibleSecs);
scrollSecsRef.current = Math.max(0, Math.min(maxScroll, video.currentTime - visibleSecs / 2));
drawStaticWaveformRef.current();
px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
}
ctx.beginPath();
ctx.strokeStyle = '#6366f1';
ctx.lineWidth = 2;
ctx.moveTo(px, 0);
ctx.lineTo(px, height);
ctx.stroke();
}
rafRef.current = requestAnimationFrame(tick);
@ -316,20 +334,40 @@ export default function WaveformTimeline() {
drawStaticWaveform();
}, [drawStaticWaveform]);
const handleClick = useCallback(
const seekToClientX = useCallback((clientX: number) => {
const buffer = audioBufferRef.current;
const canvas = headCanvasRef.current;
if (!canvas || !buffer) return;
const rect = canvas.getBoundingClientRect();
const x = clientX - rect.left;
const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
setCurrentTime(newTime);
const video = document.querySelector('video') as HTMLVideoElement | null;
if (video) video.currentTime = newTime;
}, [setCurrentTime]);
const handleMouseDown = useCallback(
(e: React.MouseEvent<HTMLCanvasElement>) => {
const buffer = audioBufferRef.current;
const canvas = headCanvasRef.current;
if (!canvas || !buffer) return;
const rect = canvas.getBoundingClientRect();
const x = e.clientX - rect.left;
const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
setCurrentTime(newTime);
const video = document.querySelector('video');
if (video) video.currentTime = newTime;
e.preventDefault();
isDraggingRef.current = true;
setIsDragging(true);
seekToClientX(e.clientX);
const onMove = (ev: MouseEvent) => {
if (!isDraggingRef.current) return;
seekToClientX(ev.clientX);
};
const onUp = () => {
isDraggingRef.current = false;
setIsDragging(false);
window.removeEventListener('mousemove', onMove);
window.removeEventListener('mouseup', onUp);
};
window.addEventListener('mousemove', onMove);
window.addEventListener('mouseup', onUp);
},
[setCurrentTime],
[seekToClientX],
);
if (!videoUrl) {
@ -360,8 +398,8 @@ export default function WaveformTimeline() {
<canvas ref={waveCanvasRef} className="absolute inset-0 w-full h-full" />
<canvas
ref={headCanvasRef}
className="absolute inset-0 w-full h-full cursor-crosshair"
onClick={handleClick}
className={`absolute inset-0 w-full h-full ${isDragging ? 'cursor-grabbing' : 'cursor-crosshair'}`}
onMouseDown={handleMouseDown}
onWheel={handleWheel}
/>
</div>