// TalkEdit/src-tauri/src/lib.rs
// --- Commands ---
2026-03-26 23:39:31 -06:00
mod paths;
mod transcription;
2026-03-26 23:39:31 -06:00
mod video_editor;
mod audio_cleaner;
mod diarization;
mod ai_provider;
mod caption_generator;
mod background_removal;
2026-03-25 01:22:30 -06:00
/// Returns the backend URL. Stubbed for now; will be replaced once the
/// Python/Rust backend is fully wired up.
#[tauri::command]
fn get_backend_url() -> String {
    // Development only: the Python backend still listens on 8642.
    // Production will swap this for a local Rust server or pure IPC.
    String::from("http://localhost:8642")
}
/// Minimal encrypt: base64-encodes the string as a placeholder until a proper
/// OS keychain implementation is added (e.g. tauri-plugin-stronghold).
#[tauri::command]
fn encrypt_string(data: String) -> String {
2026-03-25 01:41:40 -06:00
data
2026-03-25 01:22:30 -06:00
.as_bytes()
.iter()
.fold(String::new(), |mut acc, b| {
use std::fmt::Write as FmtWrite;
let _ = write!(acc, "{:02x}", b);
acc
2026-03-25 01:41:40 -06:00
})
2026-03-25 01:22:30 -06:00
}
/// Companion decode for encrypt_string.
#[tauri::command]
fn decrypt_string(encrypted: String) -> Result<String, String> {
2026-03-25 01:41:40 -06:00
(0..encrypted.len())
2026-03-25 01:22:30 -06:00
.step_by(2)
.map(|i| u8::from_str_radix(&encrypted[i..i + 2], 16))
2026-03-25 01:41:40 -06:00
.collect::<Result<Vec<u8>, _>>()
2026-03-25 01:22:30 -06:00
.map_err(|e| format!("decrypt error: {e}"))
.and_then(|b| String::from_utf8(b).map_err(|e| format!("utf8 error: {e}")))
}
/// Ensure a Whisper model is present locally, downloading it if missing.
/// The download runs on a blocking worker thread to keep the UI responsive.
#[tauri::command]
async fn ensure_model(model_name: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        transcription::ensure_model_downloaded(&model_name)
    });
    match task.await {
        Ok(result) => result,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Transcribe an audio file with Whisper.cpp on a background thread.
#[tauri::command]
async fn transcribe_audio(file_path: String, model_name: String, language: Option<String>) -> Result<transcription::TranscriptionResult, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        transcription::transcribe_audio(&file_path, &model_name, language.as_deref())
    });
    // Flatten the JoinError layer into this command's String error type.
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
2026-03-26 23:39:31 -06:00
/// Export video using stream copy (fast, lossless)
#[tauri::command]
async fn export_stream_copy(input_path: String, output_path: String, keep_segments: serde_json::Value) -> Result<String, String> {
tauri::async_runtime::spawn_blocking(move || {
video_editor::export_stream_copy(&input_path, &output_path, &keep_segments)
})
.await
.map_err(|e| format!("Task error: {:?}", e))?
}
/// Export video with a full re-encode at the requested resolution/format.
#[tauri::command]
async fn export_reencode(input_path: String, output_path: String, keep_segments: serde_json::Value, resolution: String, format_hint: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::export_reencode(&input_path, &output_path, &keep_segments, &resolution, &format_hint)
    });
    match task.await {
        Ok(result) => result,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Export video with a re-encode and burned-in subtitles from `subtitle_path`.
#[tauri::command]
async fn export_reencode_with_subs(input_path: String, output_path: String, keep_segments: serde_json::Value, subtitle_path: String, resolution: String, format_hint: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::export_reencode_with_subs(&input_path, &output_path, &keep_segments, &subtitle_path, &resolution, &format_hint)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Probe a media file and return its metadata.
#[tauri::command]
async fn get_video_info(input_path: String) -> Result<video_editor::VideoInfo, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::get_video_info(&input_path)
    });
    match task.await {
        Ok(info) => info,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Denoise audio via DeepFilterNet, falling back to FFmpeg filtering.
#[tauri::command]
async fn clean_audio(input_path: String, output_path: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        audio_cleaner::clean_audio(&input_path, &output_path)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Report whether the DeepFilterNet denoiser can be used on this machine.
#[tauri::command]
async fn is_deepfilter_available() -> Result<bool, String> {
    let task = tauri::async_runtime::spawn_blocking(audio_cleaner::is_deepfilter_available);
    match task.await {
        Ok(available) => available,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Run speaker diarization over a transcription and label its segments.
/// `use_gpu` defaults to true when the caller omits it.
#[tauri::command]
async fn diarize_and_label(transcription_result: diarization::TranscriptionResult, audio_path: String, hf_token: Option<String>, num_speakers: Option<u32>, use_gpu: Option<bool>) -> Result<diarization::TranscriptionResult, String> {
    let gpu_enabled = use_gpu.unwrap_or(true);
    let task = tauri::async_runtime::spawn_blocking(move || {
        diarization::diarize_and_label(&transcription_result, &audio_path, hf_token.as_deref(), num_speakers, gpu_enabled)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Request a completion from the configured AI provider.
/// `temperature` defaults to 0.3 when omitted.
#[tauri::command]
async fn ai_complete(prompt: String, provider: String, model: Option<String>, api_key: Option<String>, base_url: Option<String>, system_prompt: Option<String>, temperature: Option<f64>) -> Result<String, String> {
    let temp = temperature.unwrap_or(0.3);
    let task = tauri::async_runtime::spawn_blocking(move || {
        ai_provider::complete(&prompt, &provider, model.as_deref(), api_key.as_deref(), base_url.as_deref(), system_prompt.as_deref(), temp)
    });
    match task.await {
        Ok(completion) => completion,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Enumerate models known to the local Ollama instance.
/// Defaults to the standard Ollama endpoint when `base_url` is omitted.
#[tauri::command]
async fn list_ollama_models(base_url: Option<String>) -> Result<Vec<String>, String> {
    let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
    let task = tauri::async_runtime::spawn_blocking(move || {
        ai_provider::list_ollama_models(&url)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Build SRT caption text from timed words, skipping deleted indices.
/// `words_per_line` defaults to 8 when omitted.
#[tauri::command]
async fn generate_srt(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_srt(&words, deleted_indices.as_ref(), per_line)
    });
    match task.await {
        Ok(srt) => srt,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Build WebVTT caption text from timed words, skipping deleted indices.
/// `words_per_line` defaults to 8 when omitted.
#[tauri::command]
async fn generate_vtt(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_vtt(&words, deleted_indices.as_ref(), per_line)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Build ASS subtitle text from timed words with an optional style override.
/// `words_per_line` defaults to 8 when omitted.
#[tauri::command]
async fn generate_ass(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>, style: Option<caption_generator::CaptionStyle>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_ass(&words, deleted_indices.as_ref(), per_line, style.as_ref())
    });
    match task.await {
        Ok(ass) => ass,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Write generated caption content to `output_path` on disk.
#[tauri::command]
async fn save_captions(content: String, output_path: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::save_captions(&content, &output_path)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
/// Report whether the background-removal pipeline can run on this machine.
#[tauri::command]
async fn is_background_removal_available() -> Result<bool, String> {
    let task = tauri::async_runtime::spawn_blocking(background_removal::is_available);
    match task.await {
        Ok(available) => available,
        Err(e) => Err(format!("Task error: {:?}", e)),
    }
}
/// Remove the video background during export (placeholder for Phase 5).
#[tauri::command]
async fn remove_background_on_export(input_path: String, output_path: String, replacement: String, replacement_value: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        background_removal::remove_background_on_export(&input_path, &output_path, &replacement, &replacement_value)
    });
    task.await.unwrap_or_else(|e| Err(format!("Task error: {:?}", e)))
}
2026-03-25 01:22:30 -06:00
// --- App entry point ---
2026-03-24 23:55:29 -06:00
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
2026-03-25 01:22:30 -06:00
tauri::Builder::default()
.plugin(tauri_plugin_dialog::init())
.plugin(tauri_plugin_fs::init())
.setup(|app| {
if cfg!(debug_assertions) {
app.handle().plugin(
tauri_plugin_log::Builder::default()
.level(log::LevelFilter::Info)
.build(),
)?;
}
Ok(())
})
.invoke_handler(tauri::generate_handler![
get_backend_url,
encrypt_string,
decrypt_string,
ensure_model,
transcribe_audio,
2026-03-26 23:39:31 -06:00
export_stream_copy,
export_reencode,
export_reencode_with_subs,
get_video_info,
clean_audio,
is_deepfilter_available,
diarize_and_label,
ai_complete,
list_ollama_models,
generate_srt,
generate_vtt,
generate_ass,
save_captions,
is_background_removal_available,
remove_background_on_export,
2026-03-25 01:22:30 -06:00
])
.run(tauri::generate_context!())
.expect("error while running tauri application");
2026-03-24 23:55:29 -06:00
}