Files
TalkEdit/src-tauri/src/ai_provider.rs

98 lines
2.8 KiB
Rust
Raw Normal View History

2026-03-26 23:39:31 -06:00
use std::process::Command;
use serde_json;
use serde::{Deserialize, Serialize};
/// JSON payload emitted on stdout by the Python backend's `complete`
/// subcommand; only the generated text is carried back to Rust.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AICompleteResult {
    // The AI-generated completion text.
    pub response: String,
}
/// JSON payload emitted on stdout by the Python backend's
/// `list_ollama_models` subcommand.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct OllamaModelsResult {
    // Names of the locally available Ollama models.
    pub models: Vec<String>,
}
/// Complete text using AI provider.
///
/// Shells out to the Python backend script `ai_provider.py` with the
/// `complete` subcommand and parses its JSON stdout into an
/// [`AICompleteResult`], returning only the generated text.
///
/// Optional arguments are forwarded positionally; `None` is encoded as the
/// literal string `"null"` — presumably the Python side decodes that
/// sentinel back to `None` (verify against `ai_provider.py`).
///
/// # Errors
/// Returns `Err` with a descriptive message when the Python process cannot
/// be spawned, exits with a non-zero status (its stderr is embedded in the
/// message), or prints stdout that does not parse as `AICompleteResult`.
pub fn complete(
    prompt: &str,
    provider: &str,
    model: Option<&str>,
    api_key: Option<&str>,
    base_url: Option<&str>,
    system_prompt: Option<&str>,
    temperature: f64,
) -> Result<String, String> {
    let python_exe = crate::paths::python_exe();
    // NOTE(review): a non-UTF-8 path silently becomes "" here and surfaces
    // as a confusing spawn error — acceptable for now, but worth confirming.
    let python_exe = python_exe.to_str().unwrap_or_default();
    let script_path = crate::paths::backend_script("ai_provider.py");
    let script_path = script_path.to_str().unwrap_or_default();
    // Keep the owned string alive so the borrow in `args` is valid.
    let temp_str = temperature.to_string();
    // Positional argument protocol: each absent Option maps to "null".
    let args = [
        script_path,
        "complete",
        prompt,
        provider,
        model.unwrap_or("null"),
        api_key.unwrap_or("null"),
        base_url.unwrap_or("null"),
        system_prompt.unwrap_or("null"),
        &temp_str,
    ];
    let output = Command::new(python_exe)
        .args(&args)
        .output()
        .map_err(|e| format!("Failed to run Python script: {}", e))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!("Python script failed: {}", stderr));
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let result: AICompleteResult = serde_json::from_str(stdout.trim())
        .map_err(|e| format!("Failed to parse JSON: {}", e))?;
    Ok(result.response)
}
/// List available Ollama models.
///
/// Runs `ai_provider.py list_ollama_models <base_url>` via the bundled
/// Python interpreter and decodes the JSON it prints on stdout.
///
/// # Errors
/// Fails when the Python process cannot be started, exits unsuccessfully
/// (its stderr is embedded in the error message), or prints output that
/// does not parse as [`OllamaModelsResult`].
pub fn list_ollama_models(base_url: &str) -> Result<Vec<String>, String> {
    let interpreter = crate::paths::python_exe();
    let script = crate::paths::backend_script("ai_provider.py");
    let output = Command::new(interpreter.to_str().unwrap_or_default())
        .args(&[
            script.to_str().unwrap_or_default(),
            "list_ollama_models",
            base_url,
        ])
        .output()
        .map_err(|e| format!("Failed to run Python script: {}", e))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!("Python script failed: {}", stderr));
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    serde_json::from_str::<OllamaModelsResult>(stdout.trim())
        .map(|parsed| parsed.models)
        .map_err(|e| format!("Failed to parse JSON: {}", e))
}