#!/usr/bin/env python3
|
|
"""
|
|
Ollama Model Manager - Script Web para gestionar modelos de Ollama
|
|
Permite listar, descargar y eliminar modelos de Ollama con interfaz web.
|
|
"""
|
|
|
|
import os
|
|
import sys
|
|
import socket
|
|
import threading
|
|
import webbrowser
|
|
import time
|
|
import requests
|
|
from datetime import datetime
|
|
from typing import Dict, Any
|
|
from flask import Flask, render_template, jsonify, request
|
|
|
|
# Path setup so utilities from the main project can be imported.
script_root = os.path.dirname(os.path.dirname(__file__))
sys.path.append(script_root)

# Import project utilities (optional — only used for configuration).
try:
    from ParamManagerScripts.backend.script_utils import load_configuration

    # Flag checked in main() before attempting to load configuration.
    HAS_CONFIG = True
except ImportError:
    # Running standalone outside the main project: fall back to defaults.
    HAS_CONFIG = False
    msg = (
        "⚠️ No se pudo importar load_configuration, " "usando configuración por defecto"
    )
    print(msg)
|
|
|
|
|
|
class OllamaManager:
    """Client for managing models on an Ollama server via its HTTP API.

    All public methods return a dict with a ``"status"`` key (``"success"``
    or ``"error"``) rather than raising, so route handlers can pass the
    result straight to ``jsonify``.
    """

    def __init__(self, base_url: str = "http://localhost:11434"):
        # Normalize the base URL so path joins never produce "//".
        self.base_url = base_url.rstrip("/")
        self.api_url = f"{self.base_url}/api"

    def is_ollama_running(self) -> bool:
        """Return True if the Ollama server answers on /api/tags."""
        try:
            response = requests.get(f"{self.base_url}/api/tags", timeout=5)
            return response.status_code == 200
        except requests.exceptions.RequestException:
            return False

    def list_models(self) -> Dict[str, Any]:
        """List all installed models.

        Returns:
            On success: ``status``, ``models`` (raw Ollama entries),
            ``total_models``, ``total_size`` (bytes) and
            ``total_size_human``. On failure: ``status`` and ``message``.
        """
        try:
            response = requests.get(f"{self.api_url}/tags", timeout=10)
            if response.status_code == 200:
                data = response.json()
                models = data.get("models", [])

                # Aggregate disk usage across every installed model.
                total_size = sum(model.get("size", 0) for model in models)

                return {
                    "status": "success",
                    "models": models,
                    "total_models": len(models),
                    "total_size": total_size,
                    "total_size_human": self._format_bytes(total_size),
                }
            else:
                error_msg = f"Error HTTP: {response.status_code}"
                return {"status": "error", "message": error_msg}
        except requests.exceptions.RequestException as e:
            error_msg = f"Error de conexión: {str(e)}"
            return {"status": "error", "message": error_msg}

    def pull_model(self, model_name: str) -> Dict[str, Any]:
        """Start downloading a model.

        Only the start of the download is confirmed; the streamed progress
        body is not consumed here.
        """
        try:
            response = requests.post(
                f"{self.api_url}/pull",
                json={"name": model_name},
                stream=True,
                timeout=300,  # 5-minute timeout
            )
            # BUGFIX: the streamed response was never closed, leaking the
            # HTTP connection/socket until garbage collection. Close it
            # explicitly once the status code is known.
            # NOTE(review): assumes closing the client connection does not
            # cancel the server-side pull — confirm against the Ollama API.
            try:
                if response.status_code == 200:
                    msg = f"Descarga de '{model_name}' iniciada"
                    return {"status": "success", "message": msg}
                else:
                    error_msg = f"Error al descargar: {response.status_code}"
                    return {"status": "error", "message": error_msg}
            finally:
                response.close()
        except requests.exceptions.RequestException as e:
            error_msg = f"Error de conexión: {str(e)}"
            return {"status": "error", "message": error_msg}

    def delete_model(self, model_name: str) -> Dict[str, Any]:
        """Delete an installed model by name."""
        try:
            response = requests.delete(
                f"{self.api_url}/delete", json={"name": model_name}, timeout=30
            )

            if response.status_code == 200:
                msg = f"Modelo '{model_name}' eliminado correctamente"
                return {"status": "success", "message": msg}
            else:
                error_msg = f"Error al eliminar: {response.status_code}"
                return {"status": "error", "message": error_msg}

        except requests.exceptions.RequestException as e:
            error_msg = f"Error de conexión: {str(e)}"
            return {"status": "error", "message": error_msg}

    def get_model_info(self, model_name: str) -> Dict[str, Any]:
        """Return detailed information for one model (Ollama /api/show)."""
        try:
            response = requests.post(
                f"{self.api_url}/show", json={"name": model_name}, timeout=10
            )

            if response.status_code == 200:
                return {"status": "success", "info": response.json()}
            else:
                error_msg = f"Error al obtener info: {response.status_code}"
                return {"status": "error", "message": error_msg}

        except requests.exceptions.RequestException as e:
            error_msg = f"Error de conexión: {str(e)}"
            return {"status": "error", "message": error_msg}

    def _format_bytes(self, bytes_size: int) -> str:
        """Format a byte count as a human-readable string (e.g. '1.5 GB')."""
        if bytes_size == 0:
            return "0 B"

        # Divide down through the units until the value fits below 1024.
        for unit in ["B", "KB", "MB", "GB", "TB"]:
            if bytes_size < 1024.0:
                return f"{bytes_size:.1f} {unit}"
            bytes_size /= 1024.0
        return f"{bytes_size:.1f} PB"
|
|
|
|
|
|
def find_free_port() -> int:
    """Return a TCP port number that is currently free on this host."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # Binding to port 0 lets the OS pick an available ephemeral port.
        probe.bind(("", 0))
        return probe.getsockname()[1]
    finally:
        probe.close()
|
|
|
|
|
|
def create_app() -> Flask:
    """Create and configure the Flask application.

    Sets up template/static directories (creating them if missing), a
    shared OllamaManager instance, the JSON API routes, error handlers,
    and an internal shutdown endpoint used by the web UI.
    """
    app = Flask(__name__)

    # Template directory, created on demand. exist_ok avoids the
    # check-then-create race of an os.path.exists guard.
    template_dir = os.path.join(os.path.dirname(__file__), "templates")
    os.makedirs(template_dir, exist_ok=True)
    app.template_folder = template_dir

    # Static assets directory, same treatment.
    static_dir = os.path.join(os.path.dirname(__file__), "static")
    os.makedirs(static_dir, exist_ok=True)
    app.static_folder = static_dir

    # Single Ollama client shared by all route handlers (closure capture).
    ollama_manager = OllamaManager()

    @app.route("/")
    def index():
        """Main page."""
        return render_template("index.html")

    @app.route("/api/status")
    def api_status():
        """Report whether the Ollama server is reachable."""
        is_running = ollama_manager.is_ollama_running()
        return jsonify(
            {
                "status": "success",
                "ollama_running": is_running,
                "timestamp": datetime.now().isoformat(),
            }
        )

    @app.route("/api/models")
    def api_models():
        """List installed models."""
        return jsonify(ollama_manager.list_models())

    @app.route("/api/models/<model_name>/info")
    def api_model_info(model_name):
        """Return detailed information for one model."""
        return jsonify(ollama_manager.get_model_info(model_name))

    @app.route("/api/models/pull", methods=["POST"])
    def api_pull_model():
        """Start a model download; expects a JSON body with a "name" key."""
        data = request.get_json()
        if not data or "name" not in data:
            error_msg = "Nombre del modelo requerido"
            return jsonify({"status": "error", "message": error_msg}), 400

        model_name = data["name"].strip()
        if not model_name:
            error_msg = "Nombre del modelo no puede estar vacío"
            return jsonify({"status": "error", "message": error_msg}), 400

        result = ollama_manager.pull_model(model_name)
        return jsonify(result)

    @app.route("/api/models/<model_name>", methods=["DELETE"])
    def api_delete_model(model_name):
        """Delete a model."""
        result = ollama_manager.delete_model(model_name)
        return jsonify(result)

    @app.errorhandler(404)
    def not_found(error):
        """JSON 404 handler (API clients expect JSON, not HTML)."""
        error_msg = "Endpoint no encontrado"
        return jsonify({"status": "error", "message": error_msg}), 404

    @app.errorhandler(500)
    def internal_error(error):
        """JSON 500 handler."""
        error_msg = "Error interno del servidor"
        return jsonify({"status": "error", "message": error_msg}), 500

    @app.route("/_shutdown", methods=["POST"])
    def shutdown_route():
        """Internal endpoint used by the web UI to stop the server."""
        print("🛑 Solicitud de cierre recibida desde la interfaz web")

        def shutdown_server():
            # Brief pause so the HTTP response below can be delivered first.
            time.sleep(0.1)
            try:
                # Stop the Flask dev server via SIGINT (same as Ctrl+C).
                import signal

                os.kill(os.getpid(), signal.SIGINT)
            except Exception as e:
                print(f"Error al cerrar servidor: {e}")
                # Fallback: hard exit.
                os._exit(0)

        # Run the shutdown in a separate thread so this handler can return.
        shutdown_thread = threading.Thread(target=shutdown_server, daemon=True)
        shutdown_thread.start()

        return (
            jsonify(
                {"status": "success", "message": "Cerrando Ollama Model Manager..."}
            ),
            200,
        )

    return app
|
|
|
|
|
|
def main():
    """Entry point: start the web server and optionally open a browser."""
    print("🦙 Ollama Model Manager - Iniciando...")

    # Load configuration when the project utilities are importable.
    configs = {}
    if HAS_CONFIG:
        try:
            configs = load_configuration()
            print("✅ Configuración cargada correctamente")
        except Exception as e:
            print(f"⚠️ Error al cargar configuración: {e}")

    # Settings, with defaults for standalone use.
    ollama_host = configs.get("ollama_host", "http://localhost:11434")
    auto_open_browser = configs.get("auto_open_browser", True)

    app = create_app()

    # Bind to a free ephemeral port so we never collide with other services.
    port = find_free_port()
    app_url = f"http://127.0.0.1:{port}"

    print(f"🌐 Servidor iniciado en: {app_url}")
    print(f"🦙 Ollama Host: {ollama_host}")
    print("📋 Funcionalidades disponibles:")
    print("   - Listar modelos instalados")
    print("   - Ver información detallada de modelos")
    print("   - Descargar nuevos modelos")
    print("   - Eliminar modelos existentes")
    print("   - Monitoreo de espacio ocupado")
    print()
    print("⏹️ Presiona Ctrl+C para cerrar el servidor")

    if auto_open_browser:

        def open_browser():
            # BUGFIX: the original slept 1.0 s here *and* was scheduled via
            # Timer(1.0), so the browser opened after ~2 s. The Timer delay
            # alone gives Flask time to start listening.
            try:
                webbrowser.open(app_url)
                print(f"🌐 Navegador abierto en: {app_url}")
            except Exception as e:
                print(f"⚠️ No se pudo abrir el navegador automáticamente: {e}")
                print(f"🌐 Abrir manualmente: {app_url}")

        timer = threading.Timer(1.0, open_browser)
        timer.daemon = True  # don't keep the process alive if startup fails
        timer.start()

    try:
        # Blocking call: serves until interrupted.
        app.run(host="127.0.0.1", port=port, debug=False, use_reloader=False)
    except KeyboardInterrupt:
        print("\n🛑 Cerrando Ollama Model Manager...")
    except Exception as e:
        print(f"❌ Error al iniciar el servidor: {e}")
    finally:
        print("👋 Ollama Model Manager cerrado")


if __name__ == "__main__":
    main()
|