"""
|
|
Project Model
|
|
Modelo de datos para manejo de proyectos y su estado
|
|
"""
|
|
|
|
import json
from datetime import datetime, timezone
from enum import Enum
from pathlib import Path
from typing import Dict, List, Any, Optional


class ProjectStatus(Enum):
    """Possible states of a project."""

    READY = "ready"
    BACKING_UP = "backing_up"
    ERROR = "error"
    FILES_IN_USE = "files_in_use"
    RETRY_PENDING = "retry_pending"
    DISABLED = "disabled"


class Project:
    """Represents a single project."""

    def __init__(self, project_data: Dict[str, Any]):
        # Basic identification and filesystem layout
        self.id = project_data.get("id", "")
        self.name = project_data.get("name", "")
        self.path = project_data.get("path", "")
        self.type = project_data.get("type", "")
        self.s7p_file = project_data.get("s7p_file", "")
        self.observation_directory = project_data.get("observation_directory", "")
        self.relative_path = project_data.get("relative_path", "")
        self.backup_path = project_data.get("backup_path", "")

        # Schedule configuration
        schedule_config = project_data.get("schedule_config", {})
        self.schedule = schedule_config.get("schedule", "daily")
        self.schedule_time = schedule_config.get("schedule_time", "02:00")
        self.enabled = schedule_config.get("enabled", True)
        self.next_scheduled_backup = schedule_config.get("next_scheduled_backup", "")

        # Backup history
        backup_history = project_data.get("backup_history", {})
        self.last_backup_date = backup_history.get("last_backup_date", "")
        self.last_backup_file = backup_history.get("last_backup_file", "")
        self.backup_count = backup_history.get("backup_count", 0)
        self.last_successful_backup = backup_history.get("last_successful_backup", "")

        # Hash information
        hash_info = project_data.get("hash_info", {})
        self.last_s7p_hash = hash_info.get("last_s7p_hash", "")
        self.last_full_hash = hash_info.get("last_full_hash", "")
        self.last_s7p_timestamp = hash_info.get("last_s7p_timestamp", "")
        self.last_s7p_size = hash_info.get("last_s7p_size", 0)
        self.last_scan_timestamp = hash_info.get("last_scan_timestamp", "")
        self.file_count = hash_info.get("file_count", 0)
        self.total_size_bytes = hash_info.get("total_size_bytes", 0)

        # Current status
        status = project_data.get("status", {})
        self.current_status = ProjectStatus(status.get("current_status", "ready"))
        self.last_error = status.get("last_error", None)
        self.retry_count = status.get("retry_count", 0)
        self.next_retry = status.get("next_retry", None)
        self.files_in_use = status.get("files_in_use", False)
        self.exclusivity_check_passed = status.get("exclusivity_check_passed", True)
        self.last_status_update = status.get("last_status_update", "")

        # Discovery information
        discovery_info = project_data.get("discovery_info", {})
        self.discovered_date = discovery_info.get("discovered_date", "")
        self.discovery_method = discovery_info.get("discovery_method", "")
        self.auto_discovered = discovery_info.get("auto_discovered", True)

    def to_dict(self) -> Dict[str, Any]:
        """Convert the project to a dictionary for JSON serialization."""
        return {
            "id": self.id,
            "name": self.name,
            "path": self.path,
            "type": self.type,
            "s7p_file": self.s7p_file,
            "observation_directory": self.observation_directory,
            "relative_path": self.relative_path,
            "backup_path": self.backup_path,
            "schedule_config": {
                "schedule": self.schedule,
                "schedule_time": self.schedule_time,
                "enabled": self.enabled,
                "next_scheduled_backup": self.next_scheduled_backup
            },
            "backup_history": {
                "last_backup_date": self.last_backup_date,
                "last_backup_file": self.last_backup_file,
                "backup_count": self.backup_count,
                "last_successful_backup": self.last_successful_backup
            },
            "hash_info": {
                "last_s7p_hash": self.last_s7p_hash,
                "last_full_hash": self.last_full_hash,
                "last_s7p_timestamp": self.last_s7p_timestamp,
                "last_s7p_size": self.last_s7p_size,
                "last_scan_timestamp": self.last_scan_timestamp,
                "file_count": self.file_count,
                "total_size_bytes": self.total_size_bytes
            },
            "status": {
                "current_status": self.current_status.value,
                "last_error": self.last_error,
                "retry_count": self.retry_count,
                "next_retry": self.next_retry,
                "files_in_use": self.files_in_use,
                "exclusivity_check_passed": self.exclusivity_check_passed,
                "last_status_update": self.last_status_update
            },
            "discovery_info": {
                "discovered_date": self.discovered_date,
                "discovery_method": self.discovery_method,
                "auto_discovered": self.auto_discovered
            }
        }

    def update_status(self, status: ProjectStatus, error_message: Optional[str] = None) -> None:
        """Update the project's status and record when it changed."""
        self.current_status = status
        self.last_error = error_message
        self.last_status_update = datetime.now(timezone.utc).isoformat()

        # Errors accumulate retries; returning to READY clears them
        if status == ProjectStatus.ERROR:
            self.retry_count += 1
        elif status == ProjectStatus.READY:
            self.retry_count = 0
            self.last_error = None

    def update_hash_info(self, s7p_hash: Optional[str] = None, full_hash: Optional[str] = None,
                         s7p_timestamp: Optional[str] = None, s7p_size: Optional[int] = None,
                         file_count: Optional[int] = None, total_size: Optional[int] = None) -> None:
        """Update hash information; only the fields that are provided are changed."""
        if s7p_hash is not None:
            self.last_s7p_hash = s7p_hash
        if full_hash is not None:
            self.last_full_hash = full_hash
        if s7p_timestamp is not None:
            self.last_s7p_timestamp = s7p_timestamp
        if s7p_size is not None:
            self.last_s7p_size = s7p_size
        if file_count is not None:
            self.file_count = file_count
        if total_size is not None:
            self.total_size_bytes = total_size

        self.last_scan_timestamp = datetime.now(timezone.utc).isoformat()

    def update_backup_info(self, backup_file_path: str) -> None:
        """Update bookkeeping after a successful backup."""
        now = datetime.now(timezone.utc).isoformat()
        self.last_backup_date = now
        self.last_successful_backup = now
        self.last_backup_file = backup_file_path
        self.backup_count += 1
        self.update_status(ProjectStatus.READY)


class ProjectManager:
    """Manages the collection of projects."""

    def __init__(self, projects_file_path: Optional[str] = None):
        if projects_file_path is None:
            # Look for projects.json in the project root directory
            current_dir = Path(__file__).parent.parent.parent
            projects_file_path = current_dir / "projects.json"

        self.projects_file_path = Path(projects_file_path)
        self.projects: Dict[str, Project] = {}
        self.metadata = {}
        self.statistics = {}
        self.load_projects()

    def load_projects(self) -> None:
        """Load projects from the JSON file."""
        try:
            if self.projects_file_path.exists():
                with open(self.projects_file_path, 'r', encoding='utf-8') as f:
                    data = json.load(f)

                self.metadata = data.get("metadata", {})
                self.statistics = data.get("statistics", {})

                # Load each project entry
                for project_data in data.get("projects", []):
                    project = Project(project_data)
                    self.projects[project.id] = project
            else:
                # Create a default file if none exists yet
                self._create_default_projects_file()
                self.save_projects()
        except Exception as e:
            raise Exception(f"Error loading projects: {e}")

    def save_projects(self) -> None:
        """Save projects to the JSON file."""
        try:
            data = {
                "metadata": self.metadata,
                "projects": [project.to_dict() for project in self.projects.values()],
                "statistics": self.statistics
            }

            with open(self.projects_file_path, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
        except Exception as e:
            raise Exception(f"Error saving projects: {e}")

    def _create_default_projects_file(self) -> None:
        """Initialize default metadata and statistics for a new projects file."""
        self.metadata = {
            "version": "1.0",
            "last_updated": datetime.now(timezone.utc).isoformat(),
            "total_projects": 0
        }

        self.statistics = {
            "total_backups_created": 0,
            "total_backup_size_mb": 0.0,
            "average_backup_time_seconds": 0.0,
            "last_global_scan": "",
            "projects_with_errors": 0,
            "projects_pending_retry": 0
        }

    def add_project(self, project_data: Dict[str, Any]) -> Project:
        """Add a new project and persist the collection."""
        project = Project(project_data)
        self.projects[project.id] = project
        self.metadata["total_projects"] = len(self.projects)
        self.metadata["last_updated"] = datetime.now(timezone.utc).isoformat()
        self.save_projects()
        return project

    def get_project(self, project_id: str) -> Optional[Project]:
        """Return a project by ID, or None if it does not exist."""
        return self.projects.get(project_id)

    def get_all_projects(self) -> List[Project]:
        """Return all projects."""
        return list(self.projects.values())

    def get_projects_by_status(self, status: ProjectStatus) -> List[Project]:
        """Return the projects that are in the given status."""
        return [p for p in self.projects.values() if p.current_status == status]

    def get_enabled_projects(self) -> List[Project]:
        """Return the projects whose schedule is enabled."""
        return [p for p in self.projects.values() if p.enabled]

    def update_statistics(self) -> None:
        """Refresh global error/retry statistics and persist them."""
        self.statistics["projects_with_errors"] = len(
            self.get_projects_by_status(ProjectStatus.ERROR)
        )
        self.statistics["projects_pending_retry"] = len(
            self.get_projects_by_status(ProjectStatus.RETRY_PENDING)
        )
        self.save_projects()
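

# Illustrative usage sketch (not part of the original module): registers a
# hypothetical project, simulates one backup cycle, and refreshes statistics.
# The project fields and file paths below are made up for the example.
if __name__ == "__main__":
    manager = ProjectManager("projects.json")  # writes ./projects.json for the demo
    demo = manager.add_project({
        "id": "demo_plant",
        "name": "Demo Plant",
        "path": "C:/S7Projects/DemoPlant",
        "type": "step7",
    })
    demo.update_status(ProjectStatus.BACKING_UP)
    demo.update_backup_info("backups/DemoPlant_2024-01-01.zip")
    manager.update_statistics()  # also persists the updated project state
    print(f"{demo.name}: {demo.backup_count} backup(s), status={demo.current_status.value}")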