#!/usr/bin/env python3
|
|
"""
|
|
AI provider interface for Ollama, OpenAI, and Claude.
|
|
"""
|
|
|
|
import json
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
# Add backend to path
|
|
sys.path.insert(0, str(Path(__file__).parent))
|
|
|
|
from services.ai_provider import AIProvider
|
|
|
|
|
|
def main():
    """CLI dispatcher: parse sys.argv, invoke AIProvider, emit JSON on stdout.

    Commands:
        complete <prompt> <provider> [model] [api_key] [base_url] [system_prompt] [temperature]
        list_ollama_models [base_url]

    Exits with status 1 on bad usage, unknown commands, or provider errors
    (errors are reported as a JSON object on stderr).
    """
    argv = sys.argv

    def optional(index, fallback=None):
        # Positional arguments beyond the required ones may be absent.
        return argv[index] if len(argv) > index else fallback

    if len(argv) < 2:
        print("Usage: python ai_provider.py <command> [args...]", file=sys.stderr)
        sys.exit(1)

    command = argv[1]

    try:
        if command == "complete":
            if len(argv) < 4:
                print("Usage: python ai_provider.py complete <prompt> <provider> [model] [api_key] [base_url] [system_prompt] [temperature]", file=sys.stderr)
                sys.exit(1)
            raw_temperature = optional(8)
            result = AIProvider.complete(
                argv[2],                      # prompt (required)
                argv[3],                      # provider (required)
                optional(4),                  # model
                optional(5),                  # api_key
                optional(6),                  # base_url
                optional(7),                  # system_prompt
                float(raw_temperature) if raw_temperature is not None else 0.3,
            )
            print(json.dumps({"response": result}))

        elif command == "list_ollama_models":
            result = AIProvider.list_ollama_models(
                optional(2, "http://localhost:11434")
            )
            print(json.dumps({"models": result}))

        else:
            print(f"Unknown command: {command}", file=sys.stderr)
            sys.exit(1)

    except Exception as e:
        # Surface any provider failure as machine-readable JSON on stderr so the
        # calling process can parse it. SystemExit is not an Exception subclass,
        # so the usage-error exits above pass through untouched.
        print(json.dumps({"error": str(e)}), file=sys.stderr)
        sys.exit(1)
|
|
|
|
|
|
# Script entry point: dispatch CLI commands only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()