Add AI-provider API commands; wire up backend (video export, audio cleaning, diarization, captions, background removal)

This commit is contained in:
2026-03-26 23:39:31 -06:00
parent 164b2f87d4
commit 4a857d8cbf
20 changed files with 1436 additions and 280 deletions

View File

@ -1,6 +1,13 @@
// --- Commands ---
mod paths;
mod transcription;
mod video_editor;
mod audio_cleaner;
mod diarization;
mod ai_provider;
mod caption_generator;
mod background_removal;
/// Returns the backend URL. Stubbed for now; will be replaced once the
/// Python/Rust backend is fully wired up.
@ -56,6 +63,162 @@ async fn transcribe_audio(file_path: String, model_name: String, language: Optio
.map_err(|e| format!("Task error: {:?}", e))?
}
/// Export video using stream copy (fast, lossless).
///
/// Offloads the blocking work to a background thread via `spawn_blocking`
/// so the async runtime stays responsive; the inner result from
/// `video_editor::export_stream_copy` is forwarded unchanged.
#[tauri::command]
async fn export_stream_copy(input_path: String, output_path: String, keep_segments: serde_json::Value) -> Result<String, String> {
    // Owned arguments move into the blocking task.
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::export_stream_copy(&input_path, &output_path, &keep_segments)
    });
    // A join error means the blocking task panicked or was cancelled.
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Export video with re-encoding.
///
/// Runs `video_editor::export_reencode` on a blocking worker thread and
/// forwards its result; join failures are reported as error strings.
#[tauri::command]
async fn export_reencode(input_path: String, output_path: String, keep_segments: serde_json::Value, resolution: String, format_hint: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::export_reencode(&input_path, &output_path, &keep_segments, &resolution, &format_hint)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Export video with re-encoding and burned-in subtitles.
///
/// Delegates to `video_editor::export_reencode_with_subs` on a blocking
/// thread; the subtitle file at `subtitle_path` is consumed by that helper.
#[tauri::command]
async fn export_reencode_with_subs(input_path: String, output_path: String, keep_segments: serde_json::Value, subtitle_path: String, resolution: String, format_hint: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::export_reencode_with_subs(&input_path, &output_path, &keep_segments, &subtitle_path, &resolution, &format_hint)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Get video information.
///
/// Probes the file at `input_path` via `video_editor::get_video_info`
/// on a blocking worker thread and returns the resulting `VideoInfo`.
#[tauri::command]
async fn get_video_info(input_path: String) -> Result<video_editor::VideoInfo, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        video_editor::get_video_info(&input_path)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Clean audio using DeepFilterNet or FFmpeg fallback.
///
/// Runs `audio_cleaner::clean_audio` on a blocking thread; the choice
/// between DeepFilterNet and the FFmpeg fallback happens inside that helper.
#[tauri::command]
async fn clean_audio(input_path: String, output_path: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        audio_cleaner::clean_audio(&input_path, &output_path)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Check if DeepFilterNet is available.
///
/// The availability probe may touch the filesystem or spawn a process
/// (handled inside `audio_cleaner`), so it runs on a blocking thread.
#[tauri::command]
async fn is_deepfilter_available() -> Result<bool, String> {
    let task = tauri::async_runtime::spawn_blocking(audio_cleaner::is_deepfilter_available);
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Apply speaker diarization to a transcription result.
///
/// GPU usage defaults to enabled when `use_gpu` is not supplied.
/// The heavy lifting happens in `diarization::diarize_and_label` on a
/// blocking worker thread.
#[tauri::command]
async fn diarize_and_label(transcription_result: diarization::TranscriptionResult, audio_path: String, hf_token: Option<String>, num_speakers: Option<u32>, use_gpu: Option<bool>) -> Result<diarization::TranscriptionResult, String> {
    // Default: prefer the GPU unless the caller opts out.
    let gpu_enabled = use_gpu.unwrap_or(true);
    let task = tauri::async_runtime::spawn_blocking(move || {
        diarization::diarize_and_label(&transcription_result, &audio_path, hf_token.as_deref(), num_speakers, gpu_enabled)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Complete text using an AI provider.
///
/// Optional settings fall through to `ai_provider::complete` as `None`;
/// `temperature` defaults to 0.3 when not supplied. The (possibly
/// network-bound) call runs on a blocking worker thread.
#[tauri::command]
async fn ai_complete(prompt: String, provider: String, model: Option<String>, api_key: Option<String>, base_url: Option<String>, system_prompt: Option<String>, temperature: Option<f64>) -> Result<String, String> {
    // Conservative default sampling temperature.
    let temp = temperature.unwrap_or(0.3);
    let task = tauri::async_runtime::spawn_blocking(move || {
        ai_provider::complete(&prompt, &provider, model.as_deref(), api_key.as_deref(), base_url.as_deref(), system_prompt.as_deref(), temp)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// List available Ollama models.
///
/// When no `base_url` is provided, the default local Ollama endpoint is
/// used. The lookup runs on a blocking worker thread.
#[tauri::command]
async fn list_ollama_models(base_url: Option<String>) -> Result<Vec<String>, String> {
    // Fall back to the standard local Ollama address.
    let endpoint = match base_url {
        Some(url) => url,
        None => "http://localhost:11434".to_string(),
    };
    let task = tauri::async_runtime::spawn_blocking(move || {
        ai_provider::list_ollama_models(&endpoint)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Generate SRT caption content.
///
/// `deleted_indices` marks words to omit; `words_per_line` defaults to 8.
/// Generation is delegated to `caption_generator::generate_srt` on a
/// blocking worker thread.
#[tauri::command]
async fn generate_srt(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_srt(&words, deleted_indices.as_ref(), per_line)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Generate VTT caption content.
///
/// Mirrors `generate_srt` but emits WebVTT via
/// `caption_generator::generate_vtt`; `words_per_line` defaults to 8.
#[tauri::command]
async fn generate_vtt(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_vtt(&words, deleted_indices.as_ref(), per_line)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Generate ASS subtitle content.
///
/// Like the SRT/VTT variants but accepts an optional `CaptionStyle`
/// forwarded to `caption_generator::generate_ass`; `words_per_line`
/// defaults to 8.
#[tauri::command]
async fn generate_ass(words: Vec<caption_generator::Word>, deleted_indices: Option<std::collections::HashSet<usize>>, words_per_line: Option<usize>, style: Option<caption_generator::CaptionStyle>) -> Result<String, String> {
    let per_line = words_per_line.unwrap_or(8);
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::generate_ass(&words, deleted_indices.as_ref(), per_line, style.as_ref())
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Save caption content to a file.
///
/// File I/O is performed by `caption_generator::save_captions` on a
/// blocking worker thread.
#[tauri::command]
async fn save_captions(content: String, output_path: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        caption_generator::save_captions(&content, &output_path)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Check if background removal is available.
///
/// The probe (`background_removal::is_available`) may block, so it runs
/// on a worker thread.
#[tauri::command]
async fn is_background_removal_available() -> Result<bool, String> {
    let task = tauri::async_runtime::spawn_blocking(background_removal::is_available);
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
/// Remove background on export (placeholder for Phase 5).
///
/// `replacement` / `replacement_value` describe the fill applied behind
/// the subject — semantics live in
/// `background_removal::remove_background_on_export`, which runs on a
/// blocking worker thread.
#[tauri::command]
async fn remove_background_on_export(input_path: String, output_path: String, replacement: String, replacement_value: String) -> Result<String, String> {
    let task = tauri::async_runtime::spawn_blocking(move || {
        background_removal::remove_background_on_export(&input_path, &output_path, &replacement, &replacement_value)
    });
    match task.await {
        Ok(inner) => inner,
        Err(join_err) => Err(format!("Task error: {:?}", join_err)),
    }
}
// --- App entry point ---
#[cfg_attr(mobile, tauri::mobile_entry_point)]
@ -79,6 +242,21 @@ pub fn run() {
decrypt_string,
ensure_model,
transcribe_audio,
export_stream_copy,
export_reencode,
export_reencode_with_subs,
get_video_info,
clean_audio,
is_deepfilter_available,
diarize_and_label,
ai_complete,
list_ollama_models,
generate_srt,
generate_vtt,
generate_ass,
save_captions,
is_background_removal_available,
remove_background_on_export,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");