close.sh; able to save/load projects

This commit is contained in:
2026-03-30 18:36:41 -06:00
parent 246d816f84
commit ea3f1d2b23
15 changed files with 44871 additions and 31 deletions

View File

@ -1,6 +1,7 @@
import logging
import os
import stat
import sys
from contextlib import asynccontextmanager
from pathlib import Path
@ -13,6 +14,9 @@ from routers import transcribe, export, ai, captions, audio
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Dev log file — frontend forwards console.error/warn here so the agent can read it
DEV_LOG_PATH = Path(__file__).parent.parent / "webview.log"
@asynccontextmanager
async def lifespan(app: FastAPI):
@ -134,3 +138,21 @@ async def serve_local_file(request: Request, path: str = Query(...)):
@app.get("/health")
async def health():
return {"status": "ok"}
import datetime
@app.post("/dev/log")
async def dev_log(request: Request):
    """Append a forwarded frontend console message to the dev log file.

    The frontend dev-logger POSTs ``{level, message, args}``; each call is
    written as one timestamped line to ``DEV_LOG_PATH`` so the agent can
    tail it. Always returns ``{"ok": True}`` on success.
    """
    data = await request.json()
    # Coerce level to str: a non-string payload value would otherwise
    # crash .upper() below and turn a log call into a 500.
    level = str(data.get("level", "log"))
    msg = str(data.get("message", ""))
    args = [str(a) for a in data.get("args", [])]
    # HH:MM:SS.mmm — strftime gives microseconds; trim to milliseconds.
    ts = datetime.datetime.now().strftime("%H:%M:%S.%f")[:-3]
    line = f"[{ts}] [{level.upper():5}] {msg}"
    if args:
        line += " " + " ".join(args)
    line += "\n"
    # Explicit utf-8: console messages may contain non-ASCII, and the
    # platform default encoding (e.g. cp1252 on Windows) could raise.
    with open(DEV_LOG_PATH, "a", encoding="utf-8") as f:
        f.write(line)
    return {"ok": True}

File diff suppressed because it is too large Load Diff

49
close Executable file
View File

@ -0,0 +1,49 @@
#!/bin/bash
# Close TalkEdit and/or CutScript processes (Tauri dev, Electron, and Python backends)

KILLED_ANY=0

# Kill whatever is listening on a TCP port.
#   $1 = port number, $2 = human-readable app name for the message
kill_port() {
    local port=$1
    local name=$2
    local pids
    pids=$(lsof -ti tcp:"$port" 2>/dev/null)
    if [[ -n "$pids" ]]; then
        echo "Stopping $name backend (port $port, PID $pids)..."
        # $pids is intentionally unquoted: lsof -ti returns one PID per
        # line, and kill needs them as separate arguments. Quoting would
        # pass a single newline-embedded string and fail for >1 PID.
        kill $pids 2>/dev/null
        KILLED_ANY=1
    fi
}

# Kill all processes whose full command line matches an (extended) pattern.
#   $1 = pgrep -f pattern, $2 = label for the message
kill_pattern() {
    local pattern=$1
    local label=$2
    local pids
    pids=$(pgrep -f "$pattern" 2>/dev/null)
    if [[ -n "$pids" ]]; then
        echo "Stopping $label..."
        # Unquoted for the same reason as in kill_port: split on newlines.
        kill $pids 2>/dev/null
        KILLED_ANY=1
    fi
}

# --- TalkEdit (Tauri, port 8000) ---
kill_port 8000 "TalkEdit"
kill_pattern "tauri.*TalkEdit\|TalkEdit.*tauri\|cargo.*tauri dev\|/TalkEdit/target/debug" "TalkEdit (Tauri dev)"
# Vite dev server for TalkEdit (port 5173)
kill_pattern "vite.*5173\|rsbuild.*5173" "TalkEdit frontend dev server"

# --- CutScript (Electron, port 8642) ---
kill_port 8642 "CutScript"
kill_pattern "electron.*CutScript\|CutScript.*electron" "CutScript (Electron)"
kill_pattern "vite.*CutScript\|CutScript.*vite" "CutScript frontend dev server"

# --- Orphaned uvicorn workers for either app ---
kill_pattern "uvicorn.*main:app.*--port 800[012]" "leftover uvicorn workers (TalkEdit)"
kill_pattern "uvicorn.*main:app.*--port 864" "leftover uvicorn workers (CutScript)"

if [[ $KILLED_ANY -eq 0 ]]; then
    echo "Nothing to close — no TalkEdit or CutScript processes found."
else
    echo "Done."
fi

View File

@ -1,5 +1,4 @@
import { useEffect, useState, useRef } from 'react';
import { invoke } from '@tauri-apps/api/core';
import { useEditorStore } from './store/editorStore';
import VideoPlayer from './components/VideoPlayer';
import TranscriptEditor from './components/TranscriptEditor';
@ -7,6 +6,7 @@ import WaveformTimeline from './components/WaveformTimeline';
import AIPanel from './components/AIPanel';
import ExportDialog from './components/ExportDialog';
import SettingsPanel from './components/SettingsPanel';
import DevPanel from './components/DevPanel';
import { useKeyboardShortcuts } from './hooks/useKeyboardShortcuts';
import {
Film,
@ -16,10 +16,10 @@ import {
Download,
FolderSearch,
FileInput,
Save,
} from 'lucide-react';
const IS_ELECTRON = !!window.electronAPI;
const IS_TAURI = !IS_ELECTRON && '__TAURI_INTERNALS__' in window;
type Panel = 'ai' | 'settings' | 'export' | null;
@ -47,9 +47,10 @@ export default function App() {
useEffect(() => {
if (IS_ELECTRON) {
window.electronAPI!.getBackendUrl().then(setBackendUrl);
} else if (IS_TAURI) {
invoke<string>('get_backend_url').then(setBackendUrl).catch(console.error);
}
// In Tauri on Linux/WebKit2GTK the ipc:// custom protocol is blocked by
// WebKit internals; postMessage fallback works but logs noisy warnings.
// The backend URL is fixed at 127.0.0.1:8000 so we rely on the store default.
}, [setBackendUrl]);
const handleLoadProject = async () => {
@ -66,6 +67,20 @@ export default function App() {
}
};
// Electron-only: prompt for a destination and write the current project
// (serialized store state) to disk as pretty-printed JSON.
const handleSaveProject = async () => {
  if (!IS_ELECTRON) return;
  try {
    const chosen = await window.electronAPI!.saveProject();
    // User cancelled the save dialog — nothing to do.
    if (!chosen) return;
    // The dialog may return a bare path; guarantee the project extension.
    const target = chosen.endsWith('.aive') ? chosen : `${chosen}.aive`;
    const project = useEditorStore.getState().saveProject();
    await window.electronAPI!.writeFile(target, JSON.stringify(project, null, 2));
  } catch (err) {
    console.error('Failed to save project:', err);
    alert(`Failed to save project: ${err}`);
  }
};
const handleOpenFile = async () => {
if (IS_ELECTRON) {
const path = await window.electronAPI!.openFile();
@ -216,6 +231,21 @@ export default function App() {
label="Open"
onClick={IS_ELECTRON ? handleOpenFile : () => useEditorStore.getState().reset()}
/>
{IS_ELECTRON && (
<ToolbarButton
icon={<Save className="w-4 h-4" />}
label="Save"
onClick={handleSaveProject}
disabled={words.length === 0}
/>
)}
{IS_ELECTRON && (
<ToolbarButton
icon={<FileInput className="w-4 h-4" />}
label="Load"
onClick={handleLoadProject}
/>
)}
<ToolbarButton
icon={<Sparkles className="w-4 h-4" />}
label="AI"
@ -296,6 +326,7 @@ export default function App() {
</div>
)}
</div>
{import.meta.env.DEV && <DevPanel />}
</div>
);
}

View File

@ -0,0 +1,128 @@
import { useState, useCallback } from 'react';
import { useEditorStore } from '../store/editorStore';
import { Terminal, ChevronDown, ChevronUp, Play, Wifi } from 'lucide-react';

/**
 * Dev-only debugging overlay, pinned to the bottom-right corner.
 * Rendered only in dev builds (the caller gates on import.meta.env.DEV).
 * Shows the current backendUrl/videoPath, lets you load a media file by
 * absolute path, and fires quick GET requests against backend endpoints.
 */
export default function DevPanel() {
  // Whether the panel body is expanded (toggled by the header button).
  const [open, setOpen] = useState(false);
  // Raw text typed into the "Load file" input.
  const [pathInput, setPathInput] = useState('');
  // Output shown in the result <pre>; null hides the block entirely.
  const [testResult, setTestResult] = useState<string | null>(null);
  // True while the waveform request is in flight (disables its button).
  const [testing, setTesting] = useState(false);
  const { backendUrl, videoPath, loadVideo } = useEditorStore();

  // Load the typed path into the editor store; no-op on blank input.
  const handleLoad = useCallback(() => {
    const p = pathInput.trim();
    if (p) loadVideo(p);
  }, [pathInput, loadVideo]);

  // GET an arbitrary backend endpoint; show pretty-printed JSON when the
  // response declares a JSON content-type, otherwise just the status line.
  const testEndpoint = useCallback(async (endpoint: string) => {
    setTesting(true);
    setTestResult(null);
    try {
      const url = `${backendUrl}${endpoint}`;
      const res = await fetch(url);
      const text = res.headers.get('content-type')?.includes('json')
        ? JSON.stringify(await res.json(), null, 2)
        : `${res.status} ${res.statusText} (${res.headers.get('content-type') ?? 'no type'})`;
      setTestResult(`${url}\n${text}`);
    } catch (e) {
      setTestResult(`${e}`);
    } finally {
      setTesting(false);
    }
  }, [backendUrl]);

  // Request waveform bytes for the typed path (falling back to the loaded
  // video) and report the byte count on success or the error body on failure.
  const testWaveform = useCallback(async () => {
    const p = pathInput.trim() || videoPath;
    if (!p) { setTestResult('No path to test'); return; }
    setTesting(true);
    setTestResult(null);
    try {
      const url = `${backendUrl}/audio/waveform?path=${encodeURIComponent(p)}`;
      const res = await fetch(url);
      if (res.ok) {
        const buf = await res.arrayBuffer();
        setTestResult(`✓ Waveform OK — ${buf.byteLength} bytes\n${url}`);
      } else {
        // Body read is best-effort; an unreadable body still reports status.
        const body = await res.text().catch(() => '');
        setTestResult(`✗ HTTP ${res.status}\n${body}`);
      }
    } catch (e) {
      setTestResult(`${e}`);
    } finally {
      setTesting(false);
    }
  }, [backendUrl, pathInput, videoPath]);

  return (
    <div className="fixed bottom-0 right-0 z-50 w-96 font-mono text-[11px]">
      {/* Header — always visible; toggles the panel body */}
      <button
        onClick={() => setOpen(o => !o)}
        className="w-full flex items-center justify-between px-3 py-1.5 bg-[#0d0f1a] border-t border-l border-[#2a2d3e] text-[#6b7280] hover:text-white"
      >
        <span className="flex items-center gap-1.5">
          <Terminal className="w-3 h-3" />
          DevPanel
          <span className="ml-2 text-[#4a4f6a]">{backendUrl}</span>
        </span>
        {open ? <ChevronDown className="w-3 h-3" /> : <ChevronUp className="w-3 h-3" />}
      </button>
      {open && (
        <div className="bg-[#0d0f1a] border-t border-l border-[#2a2d3e] p-3 space-y-3">
          {/* State readout from the editor store */}
          <div className="space-y-0.5 text-[#4a4f6a]">
            <div>backendUrl: <span className="text-[#6366f1]">{backendUrl}</span></div>
            <div className="truncate">videoPath: <span className="text-[#6366f1]">{videoPath ?? 'null'}</span></div>
          </div>
          {/* Load file by path (Enter or the play button submits) */}
          <div className="space-y-1">
            <div className="text-[#6b7280] uppercase tracking-wider text-[9px]">Load file</div>
            <div className="flex gap-1">
              <input
                type="text"
                value={pathInput}
                onChange={e => setPathInput(e.target.value)}
                onKeyDown={e => e.key === 'Enter' && handleLoad()}
                placeholder={videoPath ?? '/path/to/file.wav'}
                className="flex-1 bg-[#13141f] border border-[#2a2d3e] rounded px-2 py-1 text-white placeholder-[#2a2d3e] focus:outline-none focus:border-[#6366f1]"
              />
              <button
                onClick={handleLoad}
                disabled={!pathInput.trim()}
                className="px-2 py-1 bg-[#6366f1] hover:bg-[#4f52d4] disabled:opacity-30 rounded text-white"
              >
                <Play className="w-3 h-3" />
              </button>
            </div>
          </div>
          {/* Quick tests against backend endpoints */}
          <div className="space-y-1">
            <div className="text-[#6b7280] uppercase tracking-wider text-[9px]">Test endpoints</div>
            <div className="flex flex-wrap gap-1">
              <button onClick={() => testEndpoint('/health')} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] rounded text-[#6b7280] hover:text-white flex items-center gap-1">
                <Wifi className="w-2.5 h-2.5" />/health
              </button>
              <button onClick={() => testEndpoint('/audio/capabilities')} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] rounded text-[#6b7280] hover:text-white">
                /audio/capabilities
              </button>
              <button onClick={testWaveform} disabled={testing} className="px-2 py-0.5 bg-[#1e2030] hover:bg-[#2a2d3e] disabled:opacity-40 rounded text-[#6b7280] hover:text-white">
                /audio/waveform
              </button>
            </div>
          </div>
          {/* Result of the most recent test, if any */}
          {testResult && (
            <pre className="bg-[#13141f] border border-[#2a2d3e] rounded p-2 text-[10px] text-[#9ca3af] whitespace-pre-wrap break-all max-h-32 overflow-y-auto">
              {testResult}
            </pre>
          )}
        </div>
      )}
    </div>
  );
}

View File

@ -57,6 +57,15 @@ export default function TranscriptEditor() {
const handleWordMouseDown = useCallback(
(index: number, e: React.MouseEvent) => {
e.preventDefault();
// Ctrl+click → seek video to this word's start time
if (e.ctrlKey) {
const word = words[index];
if (word) {
const video = document.querySelector('video') as HTMLVideoElement | null;
if (video) video.currentTime = word.start;
}
return;
}
wasDragging.current = false;
if (e.shiftKey && selectedWordIndices.length > 0) {
const first = selectedWordIndices[0];
@ -70,7 +79,7 @@ export default function TranscriptEditor() {
setSelectedWordIndices([index]);
}
},
[selectedWordIndices, setSelectedWordIndices],
[words, selectedWordIndices, setSelectedWordIndices],
);
const handleWordMouseEnter = useCallback(
@ -135,6 +144,7 @@ export default function TranscriptEditor() {
key={globalIndex}
id={`word-${globalIndex}`}
data-word-index={globalIndex}
title={`${word.start.toFixed(2)}s — Ctrl+click to seek`}
onMouseDown={(e) => handleWordMouseDown(globalIndex, e)}
onMouseEnter={() => handleWordMouseEnter(globalIndex)}
onMouseLeave={() => setHoveredWordIndex(null)}

View File

@ -42,6 +42,10 @@ export default function WaveformTimeline() {
const zoomRef = useRef(1); // 1 = show all, >1 = zoomed in
const scrollSecsRef = useRef(0); // seconds scrolled from left
const rafRef = useRef(0);
// Ref so the RAF loop can call drawStaticWaveform without a stale closure
const drawStaticWaveformRef = useRef<() => void>(() => {});
const isDraggingRef = useRef(false);
const [isDragging, setIsDragging] = useState(false);
useEffect(() => {
if (!videoUrl || !videoPath) return;
@ -226,6 +230,11 @@ export default function WaveformTimeline() {
ctx.stroke();
}, [deletedRanges]);
// Keep the ref in sync with the latest drawStaticWaveform closure
useEffect(() => {
drawStaticWaveformRef.current = drawStaticWaveform;
}, [drawStaticWaveform]);
// Redraw static layer when deletedRanges change
useEffect(() => {
drawStaticWaveform();
@ -260,15 +269,24 @@ export default function WaveformTimeline() {
if (dur > 0 && video) {
const pxPerSec = (width * zoomRef.current) / dur;
const px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
if (px >= 0 && px <= width) {
ctx.beginPath();
ctx.strokeStyle = '#6366f1';
ctx.lineWidth = 2;
ctx.moveTo(px, 0);
ctx.lineTo(px, height);
ctx.stroke();
let px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
// If the playhead is off-screen (e.g. after a seek from the transcript),
// scroll so it's centered and redraw the static waveform layer.
if (px < 0 || px > width) {
const visibleSecs = width / pxPerSec;
const maxScroll = Math.max(0, dur - visibleSecs);
scrollSecsRef.current = Math.max(0, Math.min(maxScroll, video.currentTime - visibleSecs / 2));
drawStaticWaveformRef.current();
px = (video.currentTime - scrollSecsRef.current) * pxPerSec;
}
ctx.beginPath();
ctx.strokeStyle = '#6366f1';
ctx.lineWidth = 2;
ctx.moveTo(px, 0);
ctx.lineTo(px, height);
ctx.stroke();
}
rafRef.current = requestAnimationFrame(tick);
@ -316,20 +334,40 @@ export default function WaveformTimeline() {
drawStaticWaveform();
}, [drawStaticWaveform]);
const handleClick = useCallback(
const seekToClientX = useCallback((clientX: number) => {
const buffer = audioBufferRef.current;
const canvas = headCanvasRef.current;
if (!canvas || !buffer) return;
const rect = canvas.getBoundingClientRect();
const x = clientX - rect.left;
const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
setCurrentTime(newTime);
const video = document.querySelector('video') as HTMLVideoElement | null;
if (video) video.currentTime = newTime;
}, [setCurrentTime]);
const handleMouseDown = useCallback(
(e: React.MouseEvent<HTMLCanvasElement>) => {
const buffer = audioBufferRef.current;
const canvas = headCanvasRef.current;
if (!canvas || !buffer) return;
const rect = canvas.getBoundingClientRect();
const x = e.clientX - rect.left;
const pxPerSec = (rect.width * zoomRef.current) / buffer.duration;
const newTime = Math.max(0, Math.min(buffer.duration, scrollSecsRef.current + x / pxPerSec));
setCurrentTime(newTime);
const video = document.querySelector('video');
if (video) video.currentTime = newTime;
e.preventDefault();
isDraggingRef.current = true;
setIsDragging(true);
seekToClientX(e.clientX);
const onMove = (ev: MouseEvent) => {
if (!isDraggingRef.current) return;
seekToClientX(ev.clientX);
};
const onUp = () => {
isDraggingRef.current = false;
setIsDragging(false);
window.removeEventListener('mousemove', onMove);
window.removeEventListener('mouseup', onUp);
};
window.addEventListener('mousemove', onMove);
window.addEventListener('mouseup', onUp);
},
[setCurrentTime],
[seekToClientX],
);
if (!videoUrl) {
@ -360,8 +398,8 @@ export default function WaveformTimeline() {
<canvas ref={waveCanvasRef} className="absolute inset-0 w-full h-full" />
<canvas
ref={headCanvasRef}
className="absolute inset-0 w-full h-full cursor-crosshair"
onClick={handleClick}
className={`absolute inset-0 w-full h-full ${isDragging ? 'cursor-grabbing' : 'cursor-crosshair'}`}
onMouseDown={handleMouseDown}
onWheel={handleWheel}
/>
</div>

View File

@ -0,0 +1,25 @@
/**
 * Dev-only console interceptor.
 * Forwards console.error / console.warn / console.log to the backend
 * /dev/log endpoint, which appends them to webview.log so the agent can
 * read it. No-op outside dev builds.
 */
if (import.meta.env.DEV) {
  const BACKEND = 'http://127.0.0.1:8000';

  type ConsoleFn = (...args: unknown[]) => void;

  // Render one console argument as readable text. Plain String() on an
  // object yields "[object Object]" and loses the payload, so prefer
  // JSON; fall back to String() for circular/non-serializable values.
  const toText = (v: unknown): string => {
    if (typeof v === 'string') return v;
    if (v instanceof Error) return v.stack ?? `${v.name}: ${v.message}`;
    try {
      // JSON.stringify(undefined) returns undefined, not a string.
      return JSON.stringify(v) ?? String(v);
    } catch {
      return String(v);
    }
  };

  // Wrap a console method: keep its normal behavior, then POST the
  // message to the backend (best-effort; ignored if it isn't up yet).
  const forward = (level: string, orig: ConsoleFn): ConsoleFn =>
    (...args: unknown[]) => {
      orig(...args);
      const [first, ...rest] = args;
      fetch(`${BACKEND}/dev/log`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ level, message: toText(first ?? ''), args: rest.map(toText) }),
      }).catch(() => {/* backend not running yet */});
    };

  console.error = forward('error', console.error.bind(console));
  console.warn = forward('warn', console.warn.bind(console));
  console.log = forward('log', console.log.bind(console));
}

View File

@ -47,8 +47,14 @@ window.electronAPI = {
return typeof result === 'string' ? result : null;
},
saveProject: async (): Promise<string | null> => {
const result = await save({ filters: PROJECT_FILTERS });
return result ?? null;
},
getBackendUrl: (): Promise<string> => {
return invoke<string>('get_backend_url');
// Backend URL is fixed; avoid invoke() which triggers ipc:// CSP errors on Linux/WebKit2GTK
return Promise.resolve('http://127.0.0.1:8000');
},
encryptString: (data: string): Promise<string> => {

View File

@ -1,5 +1,7 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
// Forward console.error/warn/log to backend in dev mode so we can tail webview.log
import './lib/dev-logger';
// Must be imported before App so window.electronAPI is patched before any component runs.
import './lib/tauri-bridge';
import App from './App';

View File

@ -1,10 +1,11 @@
import { create } from 'zustand';
import { temporal } from 'zundo';
import type { Word, Segment, DeletedRange, TranscriptionResult } from '../types/project';
import type { Word, Segment, DeletedRange, TranscriptionResult, ProjectFile } from '../types/project';
interface EditorState {
videoPath: string | null;
videoUrl: string | null;
exportedAudioPath: string | null; // path to modified audio from a previous export
words: Word[];
segments: Segment[];
deletedRanges: DeletedRange[];
@ -29,6 +30,8 @@ interface EditorState {
interface EditorActions {
setBackendUrl: (url: string) => void;
loadVideo: (path: string) => void;
setExportedAudioPath: (path: string | null) => void;
saveProject: () => ProjectFile;
setTranscription: (result: TranscriptionResult) => void;
setCurrentTime: (time: number) => void;
setDuration: (duration: number) => void;
@ -49,6 +52,7 @@ interface EditorActions {
const initialState: EditorState = {
videoPath: null,
videoUrl: null,
exportedAudioPath: null,
words: [],
segments: [],
deletedRanges: [],
@ -75,6 +79,27 @@ export const useEditorStore = create<EditorState & EditorActions>()(
setBackendUrl: (url) => set({ backendUrl: url }),
setExportedAudioPath: (path) => set({ exportedAudioPath: path }),
// Snapshot the current editor state as a serializable ProjectFile.
// Throws when no video is loaded (there is nothing to save).
saveProject: (): ProjectFile => {
  const state = get();
  if (!state.videoPath) throw new Error('No video loaded');
  const timestamp = new Date().toISOString();
  // globalStartIndex is a runtime-only annotation — drop it before persisting.
  const persistSegments = state.segments.map(
    ({ globalStartIndex: _drop, ...segment }) => segment,
  );
  return {
    version: 1,
    videoPath: state.videoPath,
    exportedAudioPath: state.exportedAudioPath ?? undefined,
    words: state.words,
    segments: persistSegments as unknown as Segment[],
    deletedRanges: state.deletedRanges,
    language: state.language,
    createdAt: timestamp, // will be overwritten if we track original creation time later
    modifiedAt: timestamp,
  };
},
loadVideo: (path) => {
const backend = get().backendUrl;
const url = `${backend}/file?path=${encodeURIComponent(path)}`;
@ -225,6 +250,7 @@ export const useEditorStore = create<EditorState & EditorActions>()(
segments: annotatedSegments,
deletedRanges: data.deletedRanges || [],
language: data.language || '',
exportedAudioPath: data.exportedAudioPath ?? null,
});
},

View File

@ -29,6 +29,7 @@ export interface DeletedRange extends TimeRange {
export interface ProjectFile {
version: 1;
videoPath: string;
exportedAudioPath?: string; // path to modified/processed audio if it exists
words: Word[];
segments: Segment[];
deletedRanges: DeletedRange[];

View File

@ -4,6 +4,7 @@ interface ElectronAPI {
openFile: (options?: Record<string, unknown>) => Promise<string | null>;
saveFile: (options?: Record<string, unknown>) => Promise<string | null>;
openProject: () => Promise<string | null>;
saveProject: () => Promise<string | null>;
getBackendUrl: () => Promise<string>;
encryptString: (data: string) => Promise<string>;
decryptString: (encrypted: string) => Promise<string>;

View File

@ -6,6 +6,7 @@
"properties": {
"version": { "type": "integer", "const": 1 },
"videoPath": { "type": "string" },
"exportedAudioPath": { "type": "string", "description": "Path to modified/processed audio if it was exported" },
"words": {
"type": "array",
"items": {

View File

@ -11,8 +11,8 @@
"dialog:allow-open",
"dialog:allow-save",
"fs:default",
"fs:allow-read-text-file",
"fs:allow-write-text-file",
{ "identifier": "fs:allow-read-text-file", "allow": [{ "path": "$HOME/**" }] },
{ "identifier": "fs:allow-write-text-file", "allow": [{ "path": "$HOME/**" }] },
"fs:allow-app-read-recursive",
"fs:allow-app-write-recursive"
]