still working on crashes
This commit is contained in:
@ -49,6 +49,7 @@ export default function App() {
|
||||
// Editing-mode toggles. Their exact effect on word clicks lives in the
// consuming components — presumably cut vs. mute region marking; confirm there.
const [cutMode, setCutMode] = useState(false);
const [muteMode, setMuteMode] = useState(false);

// Ref to a (hidden) file <input>; presumably used to trigger the browser
// file picker from a custom button — TODO confirm against the JSX.
const fileInputRef = useRef<HTMLInputElement>(null);

// Remembers the previous videoPath across renders so the dev-only logging
// effect below can report transitions (including the null-clearing case).
const lastVideoPathRef = useRef<string | null>(null);

// Registers global keyboard shortcuts (hook defined elsewhere in the app).
useKeyboardShortcuts();
|
||||
|
||||
@ -74,13 +75,44 @@ export default function App() {
|
||||
// The backend URL is fixed at 127.0.0.1:8000 so we rely on the store default.
|
||||
}, [setBackendUrl]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!import.meta.env.DEV) return;
|
||||
const previousVideoPath = lastVideoPathRef.current;
|
||||
if (previousVideoPath !== videoPath) {
|
||||
console.log('[app-state] videoPath transition', {
|
||||
from: previousVideoPath,
|
||||
to: videoPath,
|
||||
wordCount: words.length,
|
||||
isTranscribing,
|
||||
});
|
||||
if (previousVideoPath && !videoPath) {
|
||||
console.warn('[app-state] videoPath cleared and UI will show welcome screen', {
|
||||
previousVideoPath,
|
||||
wordCount: words.length,
|
||||
isTranscribing,
|
||||
});
|
||||
}
|
||||
lastVideoPathRef.current = videoPath;
|
||||
}
|
||||
}, [videoPath, words.length, isTranscribing]);
|
||||
|
||||
const handleLoadProject = async () => {
|
||||
if (!IS_DESKTOP) return;
|
||||
try {
|
||||
if (import.meta.env.DEV) console.log('[app-action] loadProject:dialogOpen');
|
||||
const projectPath = await window.desktopAPI!.openProject();
|
||||
if (import.meta.env.DEV) console.log('[app-action] loadProject:dialogResult', { projectPath });
|
||||
if (!projectPath) return;
|
||||
const content = await window.desktopAPI!.readFile(projectPath);
|
||||
const data = JSON.parse(content);
|
||||
if (import.meta.env.DEV) {
|
||||
console.log('[app-action] loadProject:parsed', {
|
||||
projectPath,
|
||||
videoPath: data?.videoPath,
|
||||
words: Array.isArray(data?.words) ? data.words.length : null,
|
||||
segments: Array.isArray(data?.segments) ? data.segments.length : null,
|
||||
});
|
||||
}
|
||||
useEditorStore.getState().loadProject(data);
|
||||
} catch (err) {
|
||||
console.error('Failed to load project:', err);
|
||||
@ -104,8 +136,11 @@ export default function App() {
|
||||
|
||||
const handleOpenFile = async () => {
|
||||
if (IS_DESKTOP) {
|
||||
if (import.meta.env.DEV) console.log('[app-action] openFile:dialogOpen');
|
||||
const path = await window.desktopAPI!.openFile();
|
||||
if (import.meta.env.DEV) console.log('[app-action] openFile:dialogResult', { path });
|
||||
if (path) {
|
||||
if (import.meta.env.DEV) console.log('[app-action] openFile:loadVideo', { path });
|
||||
loadVideo(path);
|
||||
await transcribeVideo(path);
|
||||
}
|
||||
@ -113,6 +148,7 @@ export default function App() {
|
||||
// Browser: use the manual path input
|
||||
const path = manualPath.trim();
|
||||
if (path) {
|
||||
if (import.meta.env.DEV) console.log('[app-action] openFile:webManualPath', { path });
|
||||
loadVideo(path);
|
||||
await transcribeVideo(path);
|
||||
}
|
||||
@ -123,11 +159,13 @@ export default function App() {
|
||||
e.preventDefault();
|
||||
const path = manualPath.trim();
|
||||
if (!path) return;
|
||||
if (import.meta.env.DEV) console.log('[app-action] manualSubmit:loadVideo', { path });
|
||||
loadVideo(path);
|
||||
await transcribeVideo(path);
|
||||
};
|
||||
|
||||
const transcribeVideo = async (path: string) => {
|
||||
if (import.meta.env.DEV) console.log('[app-action] transcribe:start', { path, whisperModel });
|
||||
setTranscribing(true, 0, 'Checking model...');
|
||||
try {
|
||||
if (!window.desktopAPI?.transcribe) {
|
||||
@ -154,15 +192,25 @@ export default function App() {
|
||||
const modelLabel = MODEL_SIZES[whisperModel] ?? 'unknown size';
|
||||
setTranscribing(true, 5, `Downloading ${whisperModel} model (${modelLabel})...`);
|
||||
await window.desktopAPI.ensureModel(whisperModel);
|
||||
if (import.meta.env.DEV) console.log('[app-action] transcribe:modelReady', { whisperModel });
|
||||
|
||||
// Step 2: run transcription
|
||||
setTranscribing(true, 20, 'Transcribing audio...');
|
||||
const data = await window.desktopAPI.transcribe(path, whisperModel);
|
||||
if (import.meta.env.DEV) {
|
||||
console.log('[app-action] transcribe:result', {
|
||||
path,
|
||||
words: Array.isArray(data?.words) ? data.words.length : null,
|
||||
segments: Array.isArray(data?.segments) ? data.segments.length : null,
|
||||
language: data?.language,
|
||||
});
|
||||
}
|
||||
setTranscription(data);
|
||||
} catch (err) {
|
||||
console.error('Transcription error:', err);
|
||||
alert(`Transcription failed. Check the console for details.\n\n${err}`);
|
||||
} finally {
|
||||
if (import.meta.env.DEV) console.log('[app-action] transcribe:finish', { path });
|
||||
setTranscribing(false);
|
||||
}
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user