Compare commits: 6ffdec7a9a...89fc7f106b (5 commits)

Commits:
  89fc7f106b
  a246c0265c
  006e2ed7d6
  c759a756be
  d63abc971c
@@ -14,7 +14,6 @@ dist/
 downloads/
 eggs/
 .eggs/
-lib/
 lib64/
 parts/
 sdist/
Binary file not shown.
app.py (81 lines changed):
@@ -1,6 +1,6 @@
 from flask import Flask, render_template, request, jsonify, url_for
 from flask_sock import Sock
-from config_manager import ConfigurationManager
+from lib.config_manager import ConfigurationManager
 import os
 import json # Added import
 from datetime import datetime
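Review note: `from lib.config_manager import ...` only works if `lib/` is an importable package. A minimal sketch of the assumed layout after this change (the `lib/__init__.py` is an assumption; it does not appear anywhere in this compare):

ParamManagerScripts/
├── app.py                    # now imports lib.config_manager
├── data/log.txt
└── lib/
    ├── __init__.py           # assumed: needed for the package import to resolve
    ├── config_manager.py     # facade class, added below
    ├── config_handler.py
    ├── directory_manager.py
    ├── group_manager.py
    ├── logger.py
    ├── schema_handler.py
    └── script_executor.py    # imported by config_manager.py but not shown in this compare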
@@ -46,7 +46,7 @@ def broadcast_message(message):
     dead_connections = set()
     timestamp = datetime.now().strftime("[%H:%M:%S] ")

-    # Si es una lista de mensajes, procesar cada uno
+    # Normalize input to a list of messages
     if isinstance(message, list):
         messages = message
     else:
@@ -54,32 +54,32 @@ def broadcast_message(message):
         messages = [line.strip() for line in message.splitlines() if line.strip()]

     # Procesar cada mensaje
-    for msg in messages:
+    for raw_msg in messages:
         # Limpiar timestamps duplicados al inicio del mensaje
-        while msg.startswith("[") and "]" in msg:
+        while raw_msg.startswith("[") and "]" in raw_msg:
             try:
-                closing_bracket = msg.index("]") + 1
-                if msg[1 : closing_bracket - 1].replace(":", "").isdigit():
-                    msg = msg[closing_bracket:].strip()
+                closing_bracket = raw_msg.index("]") + 1
+                if raw_msg[1 : closing_bracket - 1].replace(":", "").isdigit():
+                    raw_msg = raw_msg[closing_bracket:].strip()  # Update raw_msg itself
                 else:
                     break
             except:
                 break

-        # Añadir un único timestamp
-        formatted_msg = f"{timestamp}{msg}"
+        # Log the raw message using the config_manager's logger
+        # The logger will handle its own timestamping for the file.
+        config_manager.append_log(raw_msg)

-        # Escribir en el archivo de log
-        with open(config_manager.log_file, "a", encoding="utf-8") as f:
-            f.write(f"{formatted_msg}\n")
+        # Format message with timestamp *for WebSocket broadcast*
+        formatted_msg_for_ws = f"{timestamp}{raw_msg}"

         # Enviar a todos los clientes WebSocket
         for ws in list(websocket_connections):
             try:
-                if ws.connected:
-                    ws.send(f"{formatted_msg}\n")
+                if ws.connected:  # Check if ws is still connected before sending
+                    ws.send(f"{formatted_msg_for_ws}\n")  # Use the correct variable name here
             except Exception:
-                dead_connections.add(ws)
+                dead_connections.add(ws)  # Collect dead connections

     # Limpiar conexiones muertas
     websocket_connections.difference_update(dead_connections)
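Review note: the rewrite separates file logging from WebSocket display, which removes the double-timestamp problem. A minimal sketch of the resulting flow, assuming the Logger added in lib/logger.py later in this compare:

from datetime import datetime

raw_msg = "Proceso completado"                       # hypothetical message
config_manager.append_log(raw_msg)                   # Logger stamps "[HH:MM:SS] " into log.txt itself
ws_line = f"{datetime.now():[%H:%M:%S] }{raw_msg}"   # separate stamp, used only for the WS broadcast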
@@ -136,7 +136,10 @@ def handle_schema(level):

 @app.route("/api/scripts/<group>")
 def get_scripts(group):
-    return jsonify(config_manager.list_scripts(group))
+    # list_scripts ahora devuelve detalles y filtra los ocultos
+    scripts = config_manager.list_scripts(group)
+    # El frontend espera 'name' y 'description', mapeamos desde 'display_name' y 'short_description'
+    return jsonify([{"name": s['display_name'], "description": s['short_description'], "filename": s['filename'], "long_description": s['long_description']} for s in scripts])


 @app.route("/api/working-directory", methods=["POST"])
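Review note: with the mapping above, the endpoint now returns a list shaped like the sketch below (values taken from the scripts_description.json for example_group added later in this compare):

# GET /api/scripts/example_group  ->  (illustrative)
[
    {
        "name": "x1: Basico muestra los datos de config",
        "description": "Script de prueba que imprime las configuraciones y realiza una tarea simple.",
        "filename": "x1.py",
        "long_description": "Test",
    },
]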
@@ -205,34 +208,42 @@ def handle_logs():

 @app.route("/api/group-description/<group>", methods=["GET", "POST"])
 def handle_group_description(group):
-    description_path = os.path.join(
-        config_manager.script_groups_path, group, "description.json"
-    )
-
     if request.method == "GET":
         try:
-            with open(description_path, "r", encoding="utf-8") as f:
-                return jsonify(json.load(f))
-        except FileNotFoundError:
-            return jsonify(
-                {
-                    "name": group,
-                    "description": "Sin descripción",
-                    "version": "1.0",
-                    "author": "Unknown",
-                }
-            )
+            details = config_manager.get_group_details(group)
+            if "error" in details:
+                return jsonify(details), 404  # Group not found
+            return jsonify(details)
+        except Exception as e:
+            return jsonify({"status": "error", "message": str(e)}), 500
     else:  # POST
         try:
             data = request.json
-            os.makedirs(os.path.dirname(description_path), exist_ok=True)
-            with open(description_path, "w", encoding="utf-8") as f:
-                json.dump(data, f, indent=2, ensure_ascii=False)
-            return jsonify({"status": "success"})
+            result = config_manager.update_group_description(group, data)
+            return jsonify(result)
         except Exception as e:
             return jsonify({"status": "error", "message": str(e)}), 500


+@app.route("/api/script-details/<group>/<script_filename>", methods=["GET", "POST"])
+def handle_script_details(group, script_filename):
+    if request.method == "GET":
+        try:
+            details = config_manager.get_script_details(group, script_filename)
+            return jsonify(details)
+        except Exception as e:
+            print(f"Error getting script details for {group}/{script_filename}: {e}")
+            return jsonify({"status": "error", "message": str(e)}), 500
+    else:  # POST
+        try:
+            data = request.json
+            result = config_manager.update_script_details(group, script_filename, data)
+            return jsonify(result)
+        except Exception as e:
+            print(f"Error updating script details for {group}/{script_filename}: {e}")
+            return jsonify({"status": "error", "message": str(e)}), 500
+
+
 @app.route("/api/directory-history/<group>")
 def get_directory_history(group):
     history = config_manager.get_directory_history(group)
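Review note: a quick smoke test for the new script-details endpoint. A sketch only, assuming a local dev server on port 5000 (host, port, and the requests dependency are assumptions; they are not shown in this compare):

import requests  # assumption: available in the dev environment

base = "http://localhost:5000"  # assumed dev address
r = requests.get(f"{base}/api/script-details/example_group/x1.py")
print(r.status_code, r.json())  # expected: the details dict from scripts_description.json

r = requests.post(
    f"{base}/api/script-details/example_group/x1.py",
    json={"display_name": "x1 (renamed)", "hidden": False},  # hypothetical payload
)
print(r.json())  # expected: {"status": "success"}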
New file (8 lines):

{
    "x1.py": {
        "display_name": "Procesador de Emails a Cronología",
        "short_description": "Script para desensamblar los emails y generar un archivo md con la cronología de los mensajes.",
        "long_description": "## Descripción de funcionamiento:\n***\nEste script procesa archivos de correo electrónico (`.eml`) para extraer su contenido, gestionar adjuntos y generar un archivo Markdown que presenta los mensajes en orden cronológico inverso.\n***\n**Lógica Principal:**\n\n1. **Configuración:** Carga parámetros desde `ParamManagerScripts` (directorio de trabajo, nombre del archivo de salida Markdown, nombre del directorio de adjuntos).\n2. **Beautify:** Carga reglas de embellecimiento de texto desde `config/beautify_rules.json` para limpiar el contenido de los correos.\n3. **Descubrimiento:** Busca todos los archivos `.eml` en el directorio de trabajo configurado.\n4. **Procesamiento Individual:**\n   * Itera sobre cada archivo `.eml` encontrado.\n   * Utiliza `utils.email_parser.procesar_eml` para extraer metadatos (fecha, asunto, remitente, destinatarios), contenido del cuerpo y guardar los archivos adjuntos en la carpeta especificada.\n   * Calcula un hash para cada mensaje para detectar duplicados.\n   * Si un mensaje es nuevo (no duplicado):\n     * Aplica las reglas de `BeautifyProcessor` al contenido del cuerpo.\n     * Añade el mensaje procesado a una lista.\n5. **Ordenación:** Ordena la lista de mensajes únicos por fecha, del más reciente al más antiguo.\n6. **Generación de Índice:** Crea una sección de índice en formato Markdown con enlaces internos a cada mensaje.\n7. **Salida Markdown:** Escribe el índice seguido del contenido formateado en Markdown de cada mensaje en el archivo de salida configurado (ej. `cronologia.md`).\n",
        "hidden": false
    }
}
New file (1 line):

{}
(File diff suppressed because it is too large.)
@@ -1,9 +1,9 @@
 --- Log de Ejecución: x0_main.py ---
 Grupo: XML Parser to SCL
 Directorio de Trabajo: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport
-Inicio: 2025-05-03 17:05:03
-Fin: 2025-05-03 17:05:08
-Duración: 0:00:04.868601
+Inicio: 2025-05-03 23:21:54
+Fin: 2025-05-03 23:21:59
+Duración: 0:00:05.324381
 Estado: ERROR (Código de Salida: 1)

 --- SALIDA ESTÁNDAR (STDOUT) ---
@@ -2686,7 +2686,7 @@ Error: Tipo de bloque desconocido 'UnknownBlockType'. No se generará archivo.
 Error: Tipo de bloque desconocido 'UnknownBlockType'. No se generará archivo.
 Error al generar/escribir el ÁRBOL XRef de llamadas: generate_call_tree_output() missing 2 required positional arguments: 'max_call_depth' and 'xref_source_subdir'
 Traceback (most recent call last):
-  File "D:\Proyectos\Scripts\ParamManagerScripts\backend\script_groups\XML Parser to SCL\x4_cross_reference.py", line 618, in generate_cross_references
+  File "d:\Proyectos\Scripts\ParamManagerScripts\backend\script_groups\XML Parser to SCL\x4_cross_reference.py", line 618, in generate_cross_references
     call_tree_lines = generate_call_tree_output( # Pasar parámetros
     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 TypeError: generate_call_tree_output() missing 2 required positional arguments: 'max_call_depth' and 'xref_source_subdir'
New file (15 lines):

--- Log de Ejecución: x1_to_json.py ---
Grupo: XML Parser to SCL
Directorio de Trabajo: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport
Inicio: 2025-05-03 20:08:18
Fin: 2025-05-03 20:08:22
Duración: 0:00:03.850097
Estado: SUCCESS (Código de Salida: 0)

--- SALIDA ESTÁNDAR (STDOUT) ---
Por favor, selecciona el archivo XML de entrada...

--- ERRORES (STDERR) ---
No se seleccionó ningún archivo. Saliendo.

--- FIN DEL LOG ---
New file (38 lines):

{
    "x0_main.py": {
        "display_name": "Procesar Exportación XML",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "Este script es el punto de entrada y orquestador principal para el proceso de conversión de archivos XML de Siemens TIA Portal (LAD/FUP) a código SCL y la generación de documentación relacionada.\n\n**Lógica Principal:**\n\n1. **Configuración:** Carga parámetros desde `ParamManagerScripts` (directorio de trabajo, nombres de carpetas de salida, etc.).\n2. **Logging:** Inicia un archivo `log.txt` para registrar detalladamente el progreso y los errores.\n3. **Descubrimiento:** Busca recursivamente todos los archivos `.xml` dentro del subdirectorio `PLC` del directorio de trabajo configurado.\n4. **Procesamiento Individual (Pasos x1-x3):**\n   * Itera sobre cada archivo XML encontrado.\n   * Implementa lógica para **saltar** pasos si el XML no ha cambiado y las salidas ya existen y están actualizadas.\n   * Llama a funciones de `x1_to_json.py`, `x2_process.py`, y `x3_generate_scl.py` para convertir XML -> JSON intermedio -> JSON procesado -> archivo SCL/Markdown final.\n5. **Referencias Cruzadas (Paso x4):** Llama a una función de `x4_cross_reference.py` para generar análisis de llamadas, uso de DBs, etc., basándose en los archivos procesados.\n6. **Agregación (Paso x5):** Llama a una función de `x5_aggregate.py` para combinar las salidas SCL/Markdown y las referencias cruzadas en un único archivo Markdown resumen.\n7. **Resumen y Salida:** Registra un resumen final del proceso (éxitos, saltos, fallos) y finaliza con un código de estado (0 para éxito, 1 si hubo errores).\n",
        "hidden": false
    },
    "x1_to_json.py": {
        "display_name": "x1_to_json",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "",
        "hidden": true
    },
    "x2_process.py": {
        "display_name": "x2_process",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "",
        "hidden": true
    },
    "x3_generate_scl.py": {
        "display_name": "x3_generate_scl",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "",
        "hidden": true
    },
    "x4_cross_reference.py": {
        "display_name": "x4_cross_reference",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "",
        "hidden": true
    },
    "x5_aggregate.py": {
        "display_name": "x5_aggregate",
        "short_description": "LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL",
        "long_description": "",
        "hidden": true
    }
}
New file (35 lines):

--- Log de Ejecución: x1.py ---
Grupo: example_group
Directorio de Trabajo: C:\Estudio
Inicio: 2025-05-03 21:21:53
Fin: 2025-05-03 21:21:58
Duración: 0:00:05.144464
Estado: SUCCESS (Código de Salida: 0)

--- SALIDA ESTÁNDAR (STDOUT) ---
=== Ejecutando Script de Prueba 1 ===

Configuraciones cargadas:
Nivel 1: {
  "api_key": "your-api-key-here",
  "model": "gpt-3.5-turbo"
}
Nivel 2: {
  "input_dir": "D:/Datos/Entrada",
  "output_dir": "D:/Datos/Salida",
  "batch_size": 50
}
Nivel 3: {}

Simulando procesamiento...
Progreso: 20%
Progreso: 40%
Progreso: 60%
Progreso: 80%
Progreso: 100%

¡Proceso completado!

--- ERRORES (STDERR) ---
Ninguno
--- FIN DEL LOG ---
New file (24 lines):

--- Log de Ejecución: x2.py ---
Grupo: example_group
Directorio de Trabajo: C:\Estudio
Inicio: 2025-05-03 20:48:23
Fin: 2025-05-03 20:48:27
Duración: 0:00:03.208350
Estado: SUCCESS (Código de Salida: 0)

--- SALIDA ESTÁNDAR (STDOUT) ---
=== Ejecutando Script de Prueba 2 ===

Iniciando análisis de datos simulado...
Analizando lote 1...
Lote 1 completado exitosamente
Analizando lote 2...
Lote 2 completado exitosamente
Analizando lote 3...

ERROR: Error simulado en el procesamiento
El proceso se detuvo debido a un error

--- ERRORES (STDERR) ---
Ninguno
--- FIN DEL LOG ---
@@ -8,8 +8,6 @@
     "output_dir": "D:/Datos/Salida",
     "batch_size": 50
   },
-  "level3": {
-    "in_dir": "ingesta"
-  },
+  "level3": {},
   "working_directory": "C:\\Estudio"
 }
New file (14 lines):

{
    "x1.py": {
        "display_name": "x1: Basico muestra los datos de config",
        "short_description": "Script de prueba que imprime las configuraciones y realiza una tarea simple.",
        "long_description": "Test",
        "hidden": false
    },
    "x2.py": {
        "display_name": "x2 : Simula un proceso",
        "short_description": "Script de prueba que simula un proceso de análisis de datos.",
        "long_description": "",
        "hidden": false
    }
}
config_manager.py (1003 lines changed) — file diff suppressed because it is too large.
Deleted file (20 lines):

{
  "type": "object",
  "properties": {
    "api_key": {
      "type": "string",
      "title": "API Key",
      "description": "Tu clave de API para servicios externos"
    },
    "model": {
      "type": "string",
      "title": "Modelo LLM",
      "description": "Modelo de lenguaje a utilizar",
      "enum": [
        "gpt-3.5-turbo",
        "gpt-4",
        "claude-v1"
      ]
    }
  }
}
@@ -1,4 +1,20 @@
 {
   "type": "object",
-  "properties": {}
+  "properties": {
+    "api_key": {
+      "type": "string",
+      "title": "API Key",
+      "description": "Tu clave de API para servicios externos"
+    },
+    "model": {
+      "type": "string",
+      "title": "Modelo LLM",
+      "description": "Modelo de lenguaje a utilizar",
+      "enum": [
+        "gpt-3.5-turbo",
+        "gpt-4",
+        "claude-v1"
+      ]
+    }
+  }
 }
data/log.txt (2293 lines changed) — file diff suppressed because it is too large.
lib/config_handler.py (new file, 168 lines):

import os
import json
from typing import Dict, Any, Optional, Callable
from .schema_handler import SchemaHandler  # Import SchemaHandler


class ConfigHandler:
    def __init__(
        self,
        data_path: str,
        script_groups_path: str,
        get_workdir_func: Callable[[], Optional[str]],
        schema_handler: SchemaHandler,
    ):
        self.data_path = data_path
        self.script_groups_path = script_groups_path
        self._get_working_directory = (
            get_workdir_func  # Function to get current workdir
        )
        self.schema_handler = schema_handler  # Instance of SchemaHandler

    def get_config(self, level: str, group: str = None) -> Dict[str, Any]:
        """
        Get configuration for specified level.
        Applies default values from the corresponding schema if the config
        file doesn't exist or is missing keys with defaults.
        """
        config_data = {}
        needs_save = False
        schema = None
        data_path = self._get_config_path(level, group)
        schema_path_for_debug = "N/A"  # For logging

        if not data_path:
            if level == "3":  # Level 3 depends on working directory
                return {}  # Return empty if working dir not set for L3
            else:
                return {
                    "error": f"Could not determine config path for level {level}, group {group}"
                }

        # Determine schema path for logging purposes (actual loading done by schema_handler)
        if level == "1":
            schema_path_for_debug = os.path.join(self.data_path, "esquema_general.json")
        elif level == "2" and group:
            schema_path_for_debug = os.path.join(
                self.script_groups_path, group, "esquema_group.json"
            )
        elif level == "3" and group:
            schema_path_for_debug = os.path.join(
                self.script_groups_path, group, "esquema_work.json"
            )
        elif level == "3":
            schema_path_for_debug = "N/A (Level 3 without group)"

        # Get schema using SchemaHandler
        try:
            schema = self.schema_handler.get_schema(level, group)
        except Exception as e:
            print(
                f"Warning: Could not load schema for level {level}, group {group}. Defaults will not be applied. Error: {e}"
            )
            schema = None

        # Try to load existing data
        data_file_exists = os.path.exists(data_path)
        if data_file_exists:
            try:
                with open(data_path, "r", encoding="utf-8") as f_data:
                    content = f_data.read()
                    if content.strip():
                        config_data = json.loads(content)
                    else:
                        print(
                            f"Warning: Data file {data_path} is empty. Will initialize with defaults."
                        )
                        needs_save = True
            except json.JSONDecodeError:
                print(
                    f"Warning: Could not decode JSON from {data_path}. Will initialize with defaults."
                )
                config_data = {}
                needs_save = True
            except Exception as e:
                print(
                    f"Error reading data from {data_path}: {e}. Will attempt to initialize with defaults."
                )
                config_data = {}
                needs_save = True
        else:  # File doesn't exist
            print(
                f"Info: Data file not found at {data_path}. Will initialize with defaults."
            )
            needs_save = True

        # Apply defaults from schema
        if schema and isinstance(schema, dict) and "properties" in schema:
            schema_properties = schema.get("properties", {})
            if isinstance(schema_properties, dict):
                for key, prop_definition in schema_properties.items():
                    if (
                        isinstance(prop_definition, dict)
                        and key not in config_data
                        and "default" in prop_definition
                    ):
                        print(
                            f"Info: Applying default for '{key}' from schema {schema_path_for_debug}"
                        )
                        config_data[key] = prop_definition["default"]
                        needs_save = True
            else:
                print(
                    f"Warning: 'properties' in schema {schema_path_for_debug} is not a dictionary. Cannot apply defaults."
                )

        # Save if needed
        if needs_save:
            try:
                print(f"Info: Saving updated config data to: {data_path}")
                os.makedirs(os.path.dirname(data_path), exist_ok=True)
                with open(data_path, "w", encoding="utf-8") as f_data:
                    json.dump(config_data, f_data, indent=2, ensure_ascii=False)
            except IOError as e:
                print(f"Error: Could not write data file to {data_path}: {e}")
            except Exception as e:
                print(f"Unexpected error saving data to {data_path}: {e}")

        return config_data

    def update_config(
        self, level: str, data: Dict[str, Any], group: str = None
    ) -> Dict[str, str]:
        """Update configuration for specified level."""
        path = self._get_config_path(level, group)
        if not path:
            return {
                "status": "error",
                "message": f"Could not determine config path for level {level}, group {group}",
            }

        try:
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
            print(f"Info: Config successfully updated at {path}")
            return {"status": "success"}
        except Exception as e:
            print(f"Error updating config at {path}: {str(e)}")
            return {"status": "error", "message": str(e)}

    def _get_config_path(
        self, level: str, group: Optional[str] = None
    ) -> Optional[str]:
        """Helper to determine the config file path."""
        if level == "1":
            return os.path.join(self.data_path, "data.json")
        elif level == "2":
            if not group:
                return None
            return os.path.join(self.script_groups_path, group, "data.json")
        elif level == "3":
            working_directory = self._get_working_directory()
            if working_directory and os.path.isdir(working_directory):
                return os.path.join(working_directory, "data.json")
            else:
                return None  # Cannot determine L3 path without valid workdir
        else:
            return None
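Review note: the key behavior in ConfigHandler.get_config is that schema defaults are written back to disk on first read. A minimal sketch of that contract, assuming a level-2 schema with a "default" entry (the schema content below is hypothetical; the schemas in this compare define no defaults):

# esquema_group.json (hypothetical):
#   {"type": "object", "properties": {"batch_size": {"type": "integer", "default": 50}}}
#
# With no data.json yet present for the group:
cfg = config_handler.get_config("2", "example_group")
assert cfg["batch_size"] == 50   # default applied in memory...
# ...and data.json has been created on disk containing {"batch_size": 50}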
lib/config_manager.py (new file, 324 lines):

import os
import json
from typing import Dict, Any, List, Optional
import re  # Necesario para extraer docstring

# Import the new modules
from .logger import Logger
from .directory_manager import DirectoryManager
from .group_manager import GroupManager
from .schema_handler import SchemaHandler
from .config_handler import ConfigHandler
from .script_executor import ScriptExecutor

# Keep time for execution throttling state
import time
from datetime import datetime  # Needed for append_log timestamp if we keep it here


# --- ConfigurationManager Class ---
class ConfigurationManager:
    def __init__(self):
        # Adjust base_path to point to the project root (one level up from lib)
        lib_dir = os.path.dirname(os.path.abspath(__file__))
        self.base_path = os.path.dirname(lib_dir)
        self.data_path = os.path.join(self.base_path, "data")
        self.script_groups_path = os.path.join(
            self.base_path, "backend", "script_groups"
        )
        self.working_directory = None
        # log_file_path is now managed by the Logger instance

        # State for script execution throttling
        self.last_execution_time = 0
        # Minimum seconds between script executions to prevent rapid clicks
        self.min_execution_interval = 1

        # Instantiate handlers/managers
        self.logger = Logger(os.path.join(self.data_path, "log.txt"))  # Pass log path to Logger
        self.dir_manager = DirectoryManager(self.script_groups_path, self._set_working_directory_internal)
        self.group_manager = GroupManager(self.script_groups_path)
        self.schema_handler = SchemaHandler(self.data_path, self.script_groups_path, self._get_working_directory_internal)
        self.config_handler = ConfigHandler(self.data_path, self.script_groups_path, self._get_working_directory_internal, self.schema_handler)
        self.script_executor = ScriptExecutor(
            self.script_groups_path,
            self.dir_manager,
            self.config_handler,
            self.logger,  # Pass the central logger instance
            self._get_execution_state_internal,
            self._set_last_execution_time_internal
        )

    # --- Internal Callbacks/Getters for Sub-Managers ---
    def _set_working_directory_internal(self, path: Optional[str]):
        """Callback for DirectoryManager to update the main working directory."""
        if path and os.path.isdir(path):
            self.working_directory = path
            # Create data.json in the new working directory if it doesn't exist
            # This ensures L3 config can be created/read immediately after setting WD
            data_json_path = os.path.join(path, "data.json")
            if not os.path.exists(data_json_path):
                try:
                    with open(data_json_path, 'w', encoding='utf-8') as f:
                        json.dump({}, f)
                    print(f"Info: Created empty data.json in new working directory: {data_json_path}")
                except Exception as e:
                    print(f"Warning: Could not create data.json in {path}: {e}")
        else:
            self.working_directory = None

    def _get_working_directory_internal(self) -> Optional[str]:
        """Provides the current working directory to sub-managers."""
        return self.working_directory

    def _get_execution_state_internal(self) -> Dict[str, Any]:
        """Provides execution throttling state to ScriptExecutor."""
        return {"last_time": self.last_execution_time, "interval": self.min_execution_interval}

    def _set_last_execution_time_internal(self, exec_time: float):
        """Callback for ScriptExecutor to update the last execution time."""
        self.last_execution_time = exec_time

    # --- Logging Methods (Delegated) ---
    def append_log(self, message: str) -> None:
        # The Logger class now handles timestamping internally.
        # We just need to pass the raw message.
        # The broadcast_message in app.py might still add its own timestamp for display,
        # but the core logging is handled by the Logger instance.
        self.logger.append_log(message)

    def read_log(self) -> str:
        return self.logger.read_log()

    def clear_log(self) -> bool:
        return self.logger.clear_log()

    # --- Working Directory Methods (Delegated) ---
    def set_work_dir(self, group: str, path: str) -> Dict[str, str]:
        """Sets the working directory for a group and updates the global working directory."""
        # Note: This now primarily updates the group's work_dir.json and calls the internal setter
        return self.dir_manager.set_work_dir_for_group(group, path)

    def get_work_dir(self, group: str) -> Optional[str]:
        """Gets the stored working directory for a group and sets it globally if valid."""
        path = self.dir_manager.get_work_dir_for_group(group)
        # Ensure the global working directory is updated when fetched successfully
        self._set_working_directory_internal(path)
        return path

    def get_directory_history(self, group: str) -> List[str]:
        return self.dir_manager.get_directory_history(group)

    # --- Script Group Methods (Delegated) ---
    def get_script_groups(self) -> List[Dict[str, Any]]:
        return self.group_manager.get_script_groups()

    def get_group_details(self, group: str) -> Dict[str, Any]:
        """Get details (description, etc.) for a specific group."""
        group_path = os.path.join(self.script_groups_path, group)
        if not os.path.isdir(group_path):
            return {"error": "Group not found"}
        # Use the internal method of GroupManager
        details = self.group_manager._get_group_description(group_path)
        # Ensure default values if description file is missing/empty
        details.setdefault("name", group)
        details.setdefault("description", "Sin descripción")
        details.setdefault("version", "1.0")
        details.setdefault("author", "Unknown")
        return details

    def update_group_description(self, group: str, data: Dict[str, Any]) -> Dict[str, str]:
        """Update the description file for a specific group."""
        description_path = os.path.join(self.script_groups_path, group, "description.json")
        try:
            os.makedirs(os.path.dirname(description_path), exist_ok=True)
            with open(description_path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
            return {"status": "success"}
        except Exception as e:
            print(f"Error updating group description for {group}: {e}")
            return {"status": "error", "message": str(e)}

    # --- Configuration (data.json) Methods (Delegated) ---
    def get_config(self, level: str, group: str = None) -> Dict[str, Any]:
        # ConfigHandler uses the _get_working_directory_internal callback
        return self.config_handler.get_config(level, group)

    def update_config(
        self, level: str, data: Dict[str, Any], group: str = None
    ) -> Dict[str, str]:
        return self.config_handler.update_config(level, data, group)

    # --- Schema Methods (Delegated) ---
    def get_schema(self, level: str, group: str = None) -> Dict[str, Any]:
        # SchemaHandler uses the _get_working_directory_internal callback
        return self.schema_handler.get_schema(level, group)

    def update_schema(
        self, level: str, data: Dict[str, Any], group: str = None
    ) -> Dict[str, str]:
        # SchemaHandler uses the _get_working_directory_internal callback
        return self.schema_handler.update_schema(level, data, group)

    # --- Script Listing and Execution Methods ---

    # --- Métodos para manejar scripts_description.json ---

    def _get_group_path(self, group_id: str) -> Optional[str]:
        """Obtiene la ruta completa a la carpeta de un grupo."""
        path = os.path.join(self.script_groups_path, group_id)
        return path if os.path.isdir(path) else None

    def _get_script_descriptions_path(self, group_id: str) -> Optional[str]:
        """Obtiene la ruta al archivo scripts_description.json de un grupo."""
        group_path = self._get_group_path(group_id)
        if not group_path:
            return None
        return os.path.join(group_path, 'scripts_description.json')

    def _load_script_descriptions(self, group_id: str) -> Dict[str, Any]:
        """Carga las descripciones de scripts desde scripts_description.json."""
        path = self._get_script_descriptions_path(group_id)
        if path and os.path.exists(path):
            try:
                with open(path, 'r', encoding='utf-8') as f:
                    return json.load(f)
            except json.JSONDecodeError:
                print(f"Error: JSON inválido en {path}")
                return {}
            except Exception as e:
                print(f"Error leyendo {path}: {e}")
                return {}
        return {}

    def _save_script_descriptions(self, group_id: str, descriptions: Dict[str, Any]) -> bool:
        """Guarda las descripciones de scripts en scripts_description.json."""
        path = self._get_script_descriptions_path(group_id)
        if path:
            try:
                os.makedirs(os.path.dirname(path), exist_ok=True)  # Asegura que el directorio del grupo existe
                with open(path, 'w', encoding='utf-8') as f:
                    json.dump(descriptions, f, indent=4, ensure_ascii=False)
                return True
            except Exception as e:
                print(f"Error escribiendo en {path}: {e}")
                return False
        return False

    def _extract_short_description(self, script_path: str) -> str:
        """Extrae la primera línea del docstring de un script Python."""
        try:
            with open(script_path, 'r', encoding='utf-8') as f:
                content = f.read()
                # Buscar docstring al inicio del archivo """...""" o '''...'''
                match = re.match(r'^\s*("""(.*?)"""|\'\'\'(.*?)\'\'\')', content, re.DOTALL | re.MULTILINE)
                if match:
                    # Obtener el contenido del docstring (grupo 2 o 3)
                    docstring = match.group(2) or match.group(3)
                    # Tomar la primera línea no vacía
                    first_line = next((line.strip() for line in docstring.strip().splitlines() if line.strip()), None)
                    return first_line if first_line else "Sin descripción corta."
        except Exception as e:
            print(f"Error extrayendo descripción de {script_path}: {e}")
        return "Sin descripción corta."

    def list_scripts(self, group: str) -> List[Dict[str, str]]:
        """Lista scripts visibles con sus detalles desde scripts_description.json."""
        group_path = self._get_group_path(group)
        if not group_path:
            return []

        descriptions = self._load_script_descriptions(group)
        updated = False
        scripts_details = []

        try:
            # Listar archivos .py en el directorio del grupo
            script_files = [f for f in os.listdir(group_path) if f.endswith('.py') and os.path.isfile(os.path.join(group_path, f))]

            for filename in script_files:
                script_path = os.path.join(group_path, filename)
                if filename not in descriptions:
                    print(f"Script '{filename}' no encontrado en descripciones, auto-populando.")
                    short_desc = self._extract_short_description(script_path)
                    descriptions[filename] = {
                        "display_name": filename.replace('.py', ''),  # Nombre por defecto
                        "short_description": short_desc,
                        "long_description": "",
                        "hidden": False
                    }
                    updated = True

                # Añadir a la lista si no está oculto
                details = descriptions[filename]
                if not details.get('hidden', False):
                    scripts_details.append({
                        "filename": filename,  # Nombre real del archivo
                        "display_name": details.get("display_name", filename.replace('.py', '')),
                        "short_description": details.get("short_description", "Sin descripción corta."),
                        "long_description": details.get("long_description", "")  # Añadir descripción larga
                    })

            if updated:
                self._save_script_descriptions(group, descriptions)

            # Ordenar por display_name para consistencia
            scripts_details.sort(key=lambda x: x['display_name'])
            return scripts_details

        except FileNotFoundError:
            return []
        except Exception as e:
            print(f"Error listando scripts para el grupo {group}: {e}")
            return []

    def get_script_details(self, group_id: str, script_filename: str) -> Dict[str, Any]:
        """Obtiene los detalles completos de un script específico."""
        descriptions = self._load_script_descriptions(group_id)
        # Devolver detalles o un diccionario por defecto si no existe (aunque list_scripts debería crearlo)
        return descriptions.get(script_filename, {
            "display_name": script_filename.replace('.py', ''),
            "short_description": "No encontrado.",
            "long_description": "",
            "hidden": False
        })

    def update_script_details(self, group_id: str, script_filename: str, details: Dict[str, Any]) -> Dict[str, str]:
        """Actualiza los detalles de un script específico."""
        descriptions = self._load_script_descriptions(group_id)
        if script_filename in descriptions:
            # Asegurarse de que los campos esperados están presentes y actualizar
            descriptions[script_filename]["display_name"] = details.get("display_name", descriptions[script_filename].get("display_name", script_filename.replace('.py', '')))
            descriptions[script_filename]["short_description"] = details.get("short_description", descriptions[script_filename].get("short_description", ""))  # Actualizar descripción corta
            descriptions[script_filename]["long_description"] = details.get("long_description", descriptions[script_filename].get("long_description", ""))
            descriptions[script_filename]["hidden"] = details.get("hidden", descriptions[script_filename].get("hidden", False))

            if self._save_script_descriptions(group_id, descriptions):
                return {"status": "success"}
            else:
                return {"status": "error", "message": "Fallo al guardar las descripciones de los scripts."}
        else:
            # Intentar crear la entrada si el script existe pero no está en el JSON (caso raro)
            group_path = self._get_group_path(group_id)
            script_path = os.path.join(group_path, script_filename) if group_path else None
            if script_path and os.path.exists(script_path):
                print(f"Advertencia: El script '{script_filename}' existe pero no estaba en descriptions.json. Creando entrada.")
                short_desc = self._extract_short_description(script_path)
                descriptions[script_filename] = {
                    "display_name": details.get("display_name", script_filename.replace('.py', '')),
                    "short_description": short_desc,  # Usar la extraída
                    "long_description": details.get("long_description", ""),
                    "hidden": details.get("hidden", False)
                }
                if self._save_script_descriptions(group_id, descriptions):
                    return {"status": "success"}
                else:
                    return {"status": "error", "message": "Fallo al guardar las descripciones de los scripts después de crear la entrada."}
            else:
                return {"status": "error", "message": f"Script '{script_filename}' no encontrado en las descripciones ni en el sistema de archivos."}

    def execute_script(
        self, group: str, script_name: str, broadcast_fn=None
    ) -> Dict[str, Any]:
        # ScriptExecutor uses callbacks to get/set execution state
        return self.script_executor.execute_script(group, script_name, broadcast_fn)
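Review note: ConfigurationManager is now a thin facade over the sub-managers, so existing callers are unchanged. A sketch of typical use (paths resolve relative to the project root because base_path steps one level up from lib/; the directory value is taken from the data.json in this compare):

from lib.config_manager import ConfigurationManager

cm = ConfigurationManager()
cm.set_work_dir("example_group", r"C:\Estudio")  # delegates to DirectoryManager, updates work_dir.json
print(cm.get_config("3"))                         # reads C:\Estudio\data.json via ConfigHandler
cm.append_log("hola")                             # Logger adds the "[HH:MM:SS] " stamp itself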
lib/directory_manager.py (new file, 97 lines):

import os
import json
from typing import Dict, List, Optional, Callable


class DirectoryManager:
    def __init__(
        self,
        script_groups_path: str,
        set_global_workdir_callback: Callable[[Optional[str]], None],
    ):
        self.script_groups_path = script_groups_path
        self._set_global_workdir = (
            set_global_workdir_callback  # Callback to update main manager's workdir
        )

    def get_work_dir_for_group(self, group: str) -> Optional[str]:
        """Get working directory path for a script group from work_dir.json."""
        work_dir_path = os.path.join(self.script_groups_path, group, "work_dir.json")
        try:
            with open(work_dir_path, "r", encoding="utf-8") as f:
                data = json.load(f)
                path = data.get("path", "")
                if path:
                    path = os.path.normpath(path)
                if path and os.path.isdir(path):
                    return path
                elif path:
                    print(
                        f"Warning: Stored working directory for group '{group}' is invalid or does not exist: {path}"
                    )
                    return None
                else:
                    return None
        except (FileNotFoundError, json.JSONDecodeError):
            return None
        except Exception as e:
            print(f"Error reading work_dir.json for group '{group}': {e}")
            return None

    def get_directory_history(self, group: str) -> List[str]:
        """Get the directory history for a script group."""
        work_dir_path = os.path.join(self.script_groups_path, group, "work_dir.json")
        try:
            with open(work_dir_path, "r", encoding="utf-8") as f:
                data = json.load(f)
                history = [os.path.normpath(p) for p in data.get("history", [])]
                return [p for p in history if os.path.isdir(p)]
        except (FileNotFoundError, json.JSONDecodeError):
            return []
        except Exception as e:
            print(f"Error reading directory history for group '{group}': {e}")
            return []

    def set_work_dir_for_group(self, group: str, path: str) -> Dict[str, str]:
        """Set working directory path for a script group and update history."""
        path = os.path.normpath(path)

        if not os.path.isdir(path):  # Check if it's a valid directory
            return {
                "status": "error",
                "message": f"Directory does not exist or is not valid: {path}",
            }

        work_dir_file = os.path.join(self.script_groups_path, group, "work_dir.json")
        work_dir_folder = os.path.dirname(work_dir_file)

        try:
            os.makedirs(work_dir_folder, exist_ok=True)  # Ensure group folder exists
            try:
                with open(work_dir_file, "r", encoding="utf-8") as f:
                    data = json.load(f)
                    if "history" in data:
                        data["history"] = [os.path.normpath(p) for p in data["history"]]
            except (FileNotFoundError, json.JSONDecodeError):
                data = {"path": "", "history": []}

            data["path"] = path
            if "history" not in data:
                data["history"] = []
            data["history"] = [
                p for p in data["history"] if os.path.normpath(p) != path
            ]
            data["history"].insert(0, path)
            data["history"] = data["history"][:10]

            with open(work_dir_file, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)

            self._set_global_workdir(
                path
            )  # Update the main manager's working directory

            return {"status": "success", "path": path}
        except Exception as e:
            print(f"Error setting work directory for group {group} at {path}: {e}")
            return {"status": "error", "message": str(e)}
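Review note: for reference, the shape of work_dir.json that this class reads and writes, reconstructed from the code above (field names come from the code; the example values are hypothetical):

# Shape of backend/script_groups/<group>/work_dir.json, as a Python literal:
work_dir = {
    "path": "C:\\Estudio",        # current working directory for the group
    "history": [                  # most recent first, deduplicated, capped at 10 entries
        "C:\\Estudio",
        "D:\\Datos\\Entrada",     # hypothetical older entry
    ],
}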
lib/group_manager.py (new file, 45 lines):

import os
import json
from typing import Dict, Any, List


class GroupManager:
    def __init__(self, script_groups_path: str):
        self.script_groups_path = script_groups_path

    def get_script_groups(self) -> List[Dict[str, Any]]:
        """Returns list of available script groups with their descriptions."""
        groups = []
        if not os.path.isdir(self.script_groups_path):
            print(
                f"Warning: Script groups directory not found: {self.script_groups_path}"
            )
            return []

        for d in os.listdir(self.script_groups_path):
            group_path = os.path.join(self.script_groups_path, d)
            if os.path.isdir(group_path):
                description = self._get_group_description(group_path)
                groups.append(
                    {
                        "id": d,
                        "name": description.get("name", d),
                        "description": description.get(
                            "description", "Sin descripción"
                        ),
                        "version": description.get("version", "1.0"),
                        "author": description.get("author", "Unknown"),
                    }
                )
        return groups

    def _get_group_description(self, group_path: str) -> Dict[str, Any]:
        """Get description for a script group."""
        description_file = os.path.join(group_path, "description.json")
        try:
            if os.path.exists(description_file):
                with open(description_file, "r", encoding="utf-8") as f:
                    return json.load(f)
        except Exception as e:
            print(f"Error reading group description from {description_file}: {e}")
        return {}
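Review note: the description.json each group may provide, with the fallbacks get_script_groups applies when fields are missing (keys come from the code above; the values are illustrative):

# backend/script_groups/<group>/description.json, as a Python literal:
description = {
    "name": "example_group",           # fallback: the directory name
    "description": "Sin descripción",  # fallback used when the key is absent
    "version": "1.0",                  # fallback: "1.0"
    "author": "Unknown",               # fallback: "Unknown"
}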
lib/logger.py (new file, 54 lines):

import os
from datetime import datetime


class Logger:
    def __init__(self, log_file_path: str):
        self.log_file = log_file_path
        self._init_log_file()

    def _init_log_file(self):
        """Initialize log file if it doesn't exist"""
        log_dir = os.path.dirname(self.log_file)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        if not os.path.exists(self.log_file):
            try:
                with open(self.log_file, "w", encoding="utf-8") as f:
                    f.write("")
            except Exception as e:
                print(f"Error initializing log file {self.log_file}: {e}")

    def append_log(self, message: str) -> None:
        """Append a message to the log file with timestamp."""
        try:
            timestamp = datetime.now().strftime("[%H:%M:%S] ")
            lines = message.split("\n")
            lines_with_timestamp = [
                f"{timestamp}{line}\n" for line in lines if line.strip()
            ]

            if lines_with_timestamp:
                with open(self.log_file, "a", encoding="utf-8") as f:
                    f.writelines(lines_with_timestamp)
        except Exception as e:
            print(f"Error writing to log file {self.log_file}: {e}")

    def read_log(self) -> str:
        """Read the entire log file"""
        try:
            with open(self.log_file, "r", encoding="utf-8") as f:
                return f.read()
        except Exception as e:
            print(f"Error reading log file {self.log_file}: {e}")
            return ""

    def clear_log(self) -> bool:
        """Clear the log file"""
        try:
            with open(self.log_file, "w", encoding="utf-8") as f:
                f.write("")
            return True
        except Exception as e:
            print(f"Error clearing log file {self.log_file}: {e}")
            return False
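Review note: append_log stamps every non-empty line of a multi-line message, which is what lets broadcast_message in app.py hand over raw script output. A minimal sketch of the observable behavior:

logger = Logger("data/log.txt")
logger.append_log("lote 1\n\nlote 2")
# log.txt now ends with (timestamps illustrative):
#   [17:05:03] lote 1
#   [17:05:03] lote 2
# The empty line is dropped; each surviving line gets its own stamp.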
lib/schema_handler.py (new file, 285 lines):

import os
import json
import traceback
from typing import Dict, Any, Optional, Callable


class SchemaHandler:
    def __init__(
        self,
        data_path: str,
        script_groups_path: str,
        get_workdir_func: Callable[[], Optional[str]],
    ):
        self.data_path = data_path
        self.script_groups_path = script_groups_path
        self._get_working_directory = (
            get_workdir_func  # Function to get current workdir from main manager
        )

    def get_schema(self, level: str, group: str = None) -> Dict[str, Any]:
        """Get schema for specified level."""
        schema_path = self._get_schema_path(level, group)
        if not schema_path:
            print(
                f"Warning: Could not determine schema path for level '{level}', group '{group}'. Returning empty schema."
            )
            return {"type": "object", "properties": {}}

        try:
            if os.path.exists(schema_path):
                try:
                    with open(schema_path, "r", encoding="utf-8") as f:
                        schema = json.load(f)
                    if (
                        not isinstance(schema, dict)
                        or "properties" not in schema
                        or "type" not in schema
                    ):
                        print(
                            f"Warning: Schema file {schema_path} has invalid structure. Returning default."
                        )
                        return {"type": "object", "properties": {}}
                    if not isinstance(schema.get("properties"), dict):
                        print(
                            f"Warning: 'properties' in schema file {schema_path} is not a dictionary. Normalizing."
                        )
                        schema["properties"] = {}
                    return schema
                except json.JSONDecodeError:
                    print(
                        f"Error: Could not decode JSON from schema file: {schema_path}. Returning default."
                    )
                    return {"type": "object", "properties": {}}
                except Exception as e:
                    print(
                        f"Error reading schema file {schema_path}: {e}. Returning default."
                    )
                    return {"type": "object", "properties": {}}
            else:
                print(
                    f"Info: Schema file not found at {schema_path}. Creating default schema."
                )
                default_schema = {"type": "object", "properties": {}}
                try:
                    os.makedirs(os.path.dirname(schema_path), exist_ok=True)
                    with open(schema_path, "w", encoding="utf-8") as f:
                        json.dump(default_schema, f, indent=2, ensure_ascii=False)
                    return default_schema
                except Exception as e:
                    print(f"Error creating default schema file at {schema_path}: {e}")
                    return {"type": "object", "properties": {}}

        except ValueError as ve:
            print(f"Error getting schema path: {ve}")
            return {"type": "object", "properties": {}}
        except Exception as e:
            error_path = schema_path if schema_path else f"Level {level}, Group {group}"
            print(f"Unexpected error loading schema from {error_path}: {str(e)}")
            return {"type": "object", "properties": {}}

    def update_schema(
        self, level: str, data: Dict[str, Any], group: str = None
    ) -> Dict[str, str]:
        """Update schema for specified level and clean corresponding config."""
        schema_path = self._get_schema_path(level, group)
        config_path = self._get_config_path_for_schema(
            level, group
        )  # Get corresponding config path

        if not schema_path:
            return {
                "status": "error",
                "message": f"Could not determine schema path for level '{level}', group '{group}'",
            }

        try:
            os.makedirs(os.path.dirname(schema_path), exist_ok=True)

            # Basic validation and normalization of the schema data being saved
            if not isinstance(data, dict):
                data = {"type": "object", "properties": {}}
            if "type" not in data:
                data["type"] = "object"
            if "properties" not in data or not isinstance(data["properties"], dict):
                data["properties"] = {}

            with open(schema_path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
            print(f"Info: Schema successfully updated at {schema_path}")

            if config_path:
                self._clean_config_for_schema(config_path, data)
            else:
                print(
                    f"Info: Config cleaning skipped for level {level} (no valid config path)."
                )

            return {"status": "success"}

        except Exception as e:
            print(f"Error updating schema at {schema_path}: {str(e)}")
            print(traceback.format_exc())
            return {"status": "error", "message": str(e)}

    def _get_schema_path(
        self, level: str, group: Optional[str] = None
    ) -> Optional[str]:
        """Helper to determine the schema file path."""
        clean_level = str(level).split("-")[0]
        if clean_level == "1":
            return os.path.join(self.data_path, "esquema_general.json")
        elif clean_level == "2":
            if not group:
                raise ValueError("Group is required for level 2 schema")
            return os.path.join(self.script_groups_path, group, "esquema_group.json")
        elif clean_level == "3":
            if not group:
                print(
                    "Warning: Group needed to determine level 3 schema (esquema_work.json)."
                )
                return None  # Cannot determine without group
            return os.path.join(self.script_groups_path, group, "esquema_work.json")
        else:
            print(f"Warning: Invalid level '{level}' for schema path retrieval.")
            return None

    def _get_config_path_for_schema(
        self, level: str, group: Optional[str] = None
    ) -> Optional[str]:
        """Helper to determine the config file path corresponding to a schema level."""
        clean_level = str(level).split("-")[0]
        if clean_level == "1":
            return os.path.join(self.data_path, "data.json")
        elif clean_level == "2":
            if not group:
                return None
            return os.path.join(self.script_groups_path, group, "data.json")
        elif clean_level == "3":
            working_directory = self._get_working_directory()
            if working_directory and os.path.isdir(working_directory):
                return os.path.join(working_directory, "data.json")
            else:
                print(
                    f"Warning: Working directory not set or invalid ('{working_directory}') for level 3 config path."
                )
                return None
        else:
            return None

    def _clean_config_for_schema(
        self, config_path: str, schema: Dict[str, Any]
    ) -> None:
        """Clean configuration file to match schema structure."""
        try:
            if not os.path.exists(config_path):
                print(
                    f"Info: Config file {config_path} not found for cleaning. Skipping."
                )
                return

            config = {}
            content = ""
            with open(config_path, "r", encoding="utf-8") as f:
                content = f.read()
                if content.strip():
                    config = json.loads(content)
                else:
                    print(
                        f"Info: Config file {config_path} is empty. Cleaning will result in an empty object."
                    )

            cleaned_config = self._clean_object_against_schema(config, schema)

            try:
                original_config_str = json.dumps(config, sort_keys=True)
                cleaned_config_str = json.dumps(cleaned_config, sort_keys=True)
            except TypeError as te:
                print(
                    f"Warning: Could not serialize config for comparison during clean: {te}. Forcing save."
                )
                original_config_str, cleaned_config_str = "", " "  # Force inequality

            if original_config_str != cleaned_config_str or not content.strip():
                print(f"Info: Cleaning config file: {config_path}")
                with open(config_path, "w", encoding="utf-8") as f:
                    json.dump(cleaned_config, f, indent=2, ensure_ascii=False)
            else:
                print(
                    f"Info: Config file {config_path} already matches schema. No cleaning needed."
                )

        except json.JSONDecodeError:
            print(
                f"Error: Could not decode JSON from config file {config_path} during cleaning. Skipping clean."
            )
        except IOError as e:
            print(f"Error accessing config file {config_path} during cleaning: {e}")
        except Exception as e:
            print(f"Unexpected error cleaning config {config_path}: {str(e)}")
            print(traceback.format_exc())
|
||||||
|
|
||||||
|
def _clean_object_against_schema(self, data: Any, schema: Dict[str, Any]) -> Any:
|
||||||
|
"""Recursively clean data to match schema structure."""
|
||||||
|
if not isinstance(schema, dict):
|
||||||
|
print(
|
||||||
|
f"Warning: Invalid schema provided to _clean_object_against_schema (not a dict). Returning data as is: {type(schema)}"
|
||||||
|
)
|
||||||
|
return data
|
||||||
|
|
||||||
|
schema_type = schema.get("type")
|
||||||
|
|
||||||
|
if schema_type == "object":
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return {}
|
||||||
|
result = {}
|
||||||
|
schema_props = schema.get("properties", {})
|
||||||
|
if not isinstance(schema_props, dict):
|
||||||
|
print(
|
||||||
|
"Warning: 'properties' in schema is not a dictionary during cleaning. Returning empty object."
|
||||||
|
)
|
||||||
|
return {}
|
||||||
|
for key, value in data.items():
|
||||||
|
if key in schema_props:
|
||||||
|
prop_schema = schema_props[key]
|
||||||
|
if isinstance(prop_schema, dict):
|
||||||
|
result[key] = self._clean_object_against_schema(
|
||||||
|
value, prop_schema
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
f"Warning: Schema for property '{key}' is not a dictionary. Omitting from cleaned data."
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
elif schema_type == "array":
|
||||||
|
if not isinstance(data, list):
|
||||||
|
return []
|
||||||
|
items_schema = schema.get("items")
|
||||||
|
if isinstance(items_schema, dict):
|
||||||
|
return [
|
||||||
|
self._clean_object_against_schema(item, items_schema)
|
||||||
|
for item in data
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
return data # Keep array items as they are if no valid 'items' schema defined
|
||||||
|
|
||||||
|
elif "enum" in schema:
|
||||||
|
enum_values = schema.get("enum")
|
||||||
|
if isinstance(enum_values, list):
|
||||||
|
if data in enum_values:
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
return None # Or consider schema.get('default')
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
f"Warning: Invalid 'enum' definition in schema (not a list). Returning None for value '{data}'."
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
elif schema_type in ["string", "integer", "number", "boolean", "null"]:
|
||||||
|
return data # Basic types, return as is (could add type checking)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# print(f"Warning: Unknown or unhandled schema type '{schema_type}' during cleaning. Returning data as is.")
|
||||||
|
return data
|
|
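To make the cleaning contract above concrete, here is a minimal standalone sketch. It is deliberately simplified: it keeps only the object and array branches and skips the warning, enum, and type-check paths of `_clean_object_against_schema`:

# Simplified illustration only; not the class method itself.
def clean(data, schema):
    if schema.get("type") == "object" and isinstance(data, dict):
        props = schema.get("properties", {})
        # Keys missing from the schema's "properties" are pruned; kept values
        # are cleaned recursively against their property schema.
        return {k: clean(v, props[k]) for k, v in data.items() if k in props}
    if schema.get("type") == "array" and isinstance(data, list):
        items = schema.get("items", {})
        return [clean(item, items) for item in data]
    return data

schema = {
    "type": "object",
    "properties": {
        "api_key": {"type": "string"},
        "retries": {"type": "integer"},
    },
}
config = {"api_key": "abc", "retries": 3, "stale_option": True}
print(clean(config, schema))  # prints {'api_key': 'abc', 'retries': 3}; 'stale_option' is dropped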
@@ -0,0 +1,233 @@
import os
import json
import subprocess
import re
import traceback
from typing import Dict, Any, List, Optional, Callable
import sys
import time
from datetime import datetime

# Import necessary handlers/managers
from .directory_manager import DirectoryManager
from .config_handler import ConfigHandler
from .logger import Logger


class ScriptExecutor:
    def __init__(
        self,
        script_groups_path: str,
        dir_manager: DirectoryManager,
        config_handler: ConfigHandler,
        app_logger: Logger,
        get_exec_state_func: Callable[
            [], Dict[str, Any]
        ],  # Func to get {last_time, interval}
        set_last_exec_time_func: Callable[[float], None],  # Func to set last exec time
    ):
        self.script_groups_path = script_groups_path
        self.dir_manager = dir_manager
        self.config_handler = config_handler
        self.app_logger = app_logger  # Central application logger instance
        self._get_exec_state = get_exec_state_func
        self._set_last_exec_time = set_last_exec_time_func

    def execute_script(
        self,
        group: str,
        script_name: str,
        broadcast_fn: Optional[Callable[[str], None]] = None,
    ) -> Dict[str, Any]:
        """
        Execute script, broadcast output in real-time, and save final log
        to a script-specific file in the script's directory.
        """
        exec_state = self._get_exec_state()
        last_execution_time = exec_state.get("last_time", 0)
        min_execution_interval = exec_state.get("interval", 1)

        current_time = time.time()
        time_since_last = current_time - last_execution_time
        if time_since_last < min_execution_interval:
            msg = f"Por favor espere {min_execution_interval - time_since_last:.1f} segundo(s) más entre ejecuciones"
            self.app_logger.append_log(f"Warning: {msg}")  # Log throttling attempt
            if broadcast_fn:
                broadcast_fn(msg)
            return {"status": "throttled", "error": msg}

        self._set_last_exec_time(current_time)  # Update last execution time

        script_path = os.path.join(self.script_groups_path, group, script_name)
        script_dir = os.path.dirname(script_path)
        script_base_name = os.path.splitext(script_name)[0]
        script_log_path = os.path.join(script_dir, f"log_{script_base_name}.txt")

        if not os.path.exists(script_path):
            msg = f"Error Fatal: Script no encontrado en {script_path}"
            self.app_logger.append_log(msg)
            if broadcast_fn:
                broadcast_fn(msg)
            return {"status": "error", "error": "Script not found"}

        # Get working directory using DirectoryManager
        working_dir = self.dir_manager.get_work_dir_for_group(group)
        if not working_dir:
            msg = f"Error Fatal: Directorio de trabajo no configurado o inválido para el grupo '{group}'"
            self.app_logger.append_log(msg)
            if broadcast_fn:
                broadcast_fn(msg)
            return {"status": "error", "error": "Working directory not set"}
        if not os.path.isdir(working_dir):  # Double check validity
            msg = f"Error Fatal: El directorio de trabajo '{working_dir}' no es válido o no existe."
            self.app_logger.append_log(msg)
            if broadcast_fn:
                broadcast_fn(msg)
            return {"status": "error", "error": "Invalid working directory"}

        # Aggregate configurations using ConfigHandler
        configs = {
            "level1": self.config_handler.get_config("1"),
            "level2": self.config_handler.get_config("2", group),
            "level3": self.config_handler.get_config(
                "3", group
            ),  # Relies on workdir set in main manager
            "working_directory": working_dir,
        }
        print(
            f"Debug: Aggregated configs for script execution: {configs}"
        )  # Keep for debug

        config_file_path = os.path.join(script_dir, "script_config.json")
        try:
            with open(config_file_path, "w", encoding="utf-8") as f:
                json.dump(configs, f, indent=2, ensure_ascii=False)
        except Exception as e:
            msg = f"Error Fatal: No se pudieron guardar las configuraciones temporales en {config_file_path}: {str(e)}"
            self.app_logger.append_log(msg)
            if broadcast_fn:
                broadcast_fn(msg)
            # Optionally return error here

        stdout_capture = []
        stderr_capture = ""
        process = None
        start_time = datetime.now()

        try:
            if broadcast_fn:
                start_msg = f"[{start_time.strftime('%H:%M:%S')}] Iniciando ejecución de {script_name} en {working_dir}..."
                broadcast_fn(start_msg)

            creation_flags = (
                subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
            )

            process = subprocess.Popen(
                ["python", "-u", script_path],
                cwd=working_dir,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                encoding="utf-8",
                errors="replace",
                bufsize=1,
                env=dict(os.environ, PYTHONIOENCODING="utf-8"),
                creationflags=creation_flags,
            )

            # Stream stdout line by line; stderr is read once the process ends
            while True:
                line = process.stdout.readline()
                if not line and process.poll() is not None:
                    break
                if line:
                    cleaned_line = line.rstrip()
                    stdout_capture.append(cleaned_line)
                    if broadcast_fn:
                        broadcast_fn(cleaned_line)

            return_code = process.wait()
            end_time = datetime.now()
            duration = end_time - start_time
            stderr_capture = process.stderr.read()
            status = "success" if return_code == 0 else "error"
            completion_msg = f"[{end_time.strftime('%H:%M:%S')}] Ejecución de {script_name} finalizada ({status}). Duración: {duration}."

            if stderr_capture:
                if status == "error" and broadcast_fn:
                    broadcast_fn("--- ERRORES ---")
                    broadcast_fn(stderr_capture.strip())
                    broadcast_fn("--- FIN ERRORES ---")
                completion_msg += " Se detectaron errores (ver log)."

            if broadcast_fn:
                broadcast_fn(completion_msg)

            # Write to script-specific log file
            try:
                with open(script_log_path, "w", encoding="utf-8") as log_f:
                    log_f.write(
                        f"--- Log de Ejecución: {script_name} ---\nGrupo: {group}\nDirectorio de Trabajo: {working_dir}\n"
                    )
                    log_f.write(
                        f"Inicio: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\nFin: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\nDuración: {duration}\n"
                    )
                    log_f.write(
                        f"Estado: {status.upper()} (Código de Salida: {return_code})\n\n--- SALIDA ESTÁNDAR (STDOUT) ---\n"
                    )
                    log_f.write("\n".join(stdout_capture))
                    log_f.write("\n\n--- ERRORES (STDERR) ---\n")
                    log_f.write(stderr_capture if stderr_capture else "Ninguno")
                    log_f.write("\n--- FIN DEL LOG ---\n")
                if broadcast_fn:
                    broadcast_fn(f"Log completo guardado en: {script_log_path}")
                print(f"Info: Script log saved to {script_log_path}")
            except Exception as log_e:
                err_msg = f"Error al guardar el log específico del script en {script_log_path}: {log_e}"
                print(err_msg)
                self.app_logger.append_log(f"ERROR: {err_msg}")
                if broadcast_fn:
                    broadcast_fn(err_msg)

            return {
                "status": status,
                "return_code": return_code,
                "error": stderr_capture if stderr_capture else None,
                "log_file": script_log_path,
            }

        except Exception as e:
            end_time = datetime.now()
            duration = end_time - start_time
            error_msg = (
                f"Error inesperado durante la ejecución de {script_name}: {str(e)}"
            )
            traceback_info = traceback.format_exc()
            print(error_msg)
            print(traceback_info)
            self.app_logger.append_log(f"ERROR FATAL: {error_msg}\n{traceback_info}")
            if broadcast_fn:
                broadcast_fn(
                    f"[{end_time.strftime('%H:%M:%S')}] ERROR FATAL: {error_msg}"
                )

            try:  # Attempt to write error to script-specific log
                with open(script_log_path, "w", encoding="utf-8") as log_f:
                    log_f.write(
                        f"--- Log de Ejecución: {script_name} ---\nGrupo: {group}\nDirectorio de Trabajo: {working_dir}\n"
                    )
                    log_f.write(
                        f"Inicio: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\nFin: {end_time.strftime('%Y-%m-%d %H:%M:%S')} (Interrumpido por error)\n"
                    )
                    log_f.write(
                        f"Duración: {duration}\nEstado: FATAL ERROR\n\n--- ERROR ---\n{error_msg}\n\n--- TRACEBACK ---\n{traceback_info}\n--- FIN DEL LOG ---\n"
                    )
            except Exception as log_e:
                print(f"Error adicional al intentar guardar el log de error: {log_e}")

            return {"status": "error", "error": error_msg, "traceback": traceback_info}
        finally:
            if process and process.stderr:
                process.stderr.close()
            if process and process.stdout:
                process.stdout.close()
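As a usage sketch, this is roughly how a ScriptExecutor could be wired up. The manager instances, paths, group and script names below are placeholders (the real ones live in the Flask app and are not shown in this diff); only the constructor signature and the returned dict mirror the class above:

# Hypothetical wiring; dir_manager, config_handler and app_logger are assumed
# to be the app's existing DirectoryManager / ConfigHandler / Logger instances.
exec_state = {"last_time": 0.0, "interval": 1}

def set_last_exec_time(t: float) -> None:
    exec_state["last_time"] = t

executor = ScriptExecutor(
    script_groups_path="backend/script_groups",  # assumed layout
    dir_manager=dir_manager,
    config_handler=config_handler,
    app_logger=app_logger,
    get_exec_state_func=lambda: exec_state,
    set_last_exec_time_func=set_last_exec_time,
)

# broadcast_fn=print streams each output line to stdout instead of a WebSocket.
result = executor.execute_script("ExampleGroup", "x1_example.py", broadcast_fn=print)
print(result["status"], result.get("log_file"))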
@@ -102,3 +102,63 @@
    padding: 1rem;
    border-top: 1px solid #e5e7eb;
}

/* Styles for headings inside the script's long description */
.long-description-content h1 {
    font-size: 1.875rem; /* Tailwind text-3xl equivalent */
    font-weight: bold;
    margin-top: 1rem;
    margin-bottom: 0.5rem;
}
.long-description-content h2 {
    font-size: 1.5rem; /* text-2xl equivalent */
    font-weight: bold;
    margin-top: 0.875rem;
    margin-bottom: 0.4rem;
}
.long-description-content h3 {
    font-size: 1.25rem; /* text-xl equivalent */
    font-weight: bold;
    margin-top: 0.75rem;
    margin-bottom: 0.3rem;
}
/* Add styles for h4, h5, h6 if you need them */

.long-description-content hr {
    margin-top: 1rem;
    margin-bottom: 1rem;
    border-top-width: 1px;
}

/* Optional adjustments for lists and code if you are not using 'prose' */
.long-description-content ul,
.long-description-content ol {
    list-style-position: inside;
    margin-left: 1rem;
    margin-top: 0.5rem;
    margin-bottom: 0.5rem;
}
.long-description-content ul {
    list-style-type: disc;
}
.long-description-content ol {
    list-style-type: decimal;
}
.long-description-content pre {
    background-color: #f3f4f6; /* bg-gray-100 */
    padding: 0.75rem;
    border-radius: 0.25rem;
    overflow-x: auto;
    margin-top: 0.5rem;
    margin-bottom: 0.5rem;
}
.long-description-content code {
    font-family: monospace;
    /* Inline-code styling if needed */
}
/* Specific styling for code blocks inside <pre> */
.long-description-content pre code {
    background-color: transparent;
    padding: 0;
}
@@ -86,21 +86,168 @@ async function loadConfigs() {
    }
}

// --- Functions for editing script details ---

async function editScriptDetails(group, scriptFilename) {
    console.log(`[1] editScriptDetails called for: group=${group}, script=${scriptFilename}`); // Initial log
    try {
        console.log('[2] Fetching script details...'); // Before the fetch
        const response = await fetch(`/api/script-details/${group}/${scriptFilename}`);
        console.log('[3] Fetch response received:', response); // After the fetch
        if (!response.ok) {
            console.error(`[!] Fetch error: ${response.status} ${response.statusText}`); // Non-OK response
            throw new Error(`Error fetching script details: ${response.statusText}`);
        }
        console.log('[4] Parsing JSON response...');
        const details = await response.json();
        console.log('[5] Script details received:', details);

        // Populate the modal
        document.getElementById('edit-script-group').value = group;
        document.getElementById('edit-script-filename').value = scriptFilename;
        document.getElementById('edit-script-filename-display').textContent = scriptFilename; // Show the filename
        document.getElementById('edit-script-display-name').value = details.display_name || '';
        document.getElementById('edit-script-short-description').value = details.short_description || ''; // Populate short description
        document.getElementById('edit-script-long-description').value = details.long_description || '';
        document.getElementById('edit-script-hidden').checked = details.hidden || false;

        console.log('[6] Populated modal fields.');
        // Show the modal
        document.getElementById('script-editor-modal').classList.remove('hidden');
        console.log('[7] Modal should be visible now.');

    } catch (error) {
        console.error('[!] Error in editScriptDetails:', error);
        alert(`Error al cargar detalles del script: ${error.message}`);
    }
}

function closeScriptEditorModal() {
    document.getElementById('script-editor-modal').classList.add('hidden');
    // Optionally clear the fields here:
    // document.getElementById('edit-script-display-name').value = '';
    // document.getElementById('edit-script-long-description').value = '';
    // document.getElementById('edit-script-hidden').checked = false;
}

async function saveScriptDetails() {
    const group = document.getElementById('edit-script-group').value;
    const scriptFilename = document.getElementById('edit-script-filename').value;
    const updatedDetails = {
        display_name: document.getElementById('edit-script-display-name').value,
        short_description: document.getElementById('edit-script-short-description').value, // Collect short description
        long_description: document.getElementById('edit-script-long-description').value,
        hidden: document.getElementById('edit-script-hidden').checked
    };

    try {
        const response = await fetch(`/api/script-details/${group}/${scriptFilename}`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(updatedDetails)
        });
        const result = await response.json();
        if (!response.ok || result.status !== 'success') {
            throw new Error(result.message || `Error guardando detalles: ${response.statusText}`);
        }
        closeScriptEditorModal();
        await loadScripts(currentGroup); // Reload the script list
        showToast('Detalles del script guardados con éxito.');

    } catch (error) {
        console.error('Error saving script details:', error);
        alert(`Error al guardar detalles del script: ${error.message}`);
    }
}

// Load and display available scripts
async function loadScripts(group) {
    if (!group) {
        console.warn("loadScripts called without group");
        document.getElementById('scripts-list').innerHTML = '<p class="text-gray-500">Selecciona un grupo para ver los scripts.</p>';
        return;
    }
    const response = await fetch(`/api/scripts/${group}`);
    const scripts = await response.json();
    const container = document.getElementById('scripts-list');
-   container.innerHTML = scripts.map(script => `
-       <div class="mb-4 p-4 border rounded">
-           <div class="font-bold">${script.name}</div>
-           <div class="text-gray-600 text-sm">${script.description}</div>
-           <button onclick="executeScript('${script.name}')"
-               class="mt-2 bg-green-500 text-white px-3 py-1 rounded">
-               Ejecutar
-           </button>
-       </div>
-   `).join('');
    container.innerHTML = ''; // Clear the container before adding new items

    scripts.forEach(script => {
        const div = document.createElement('div');
        div.className = 'script-item p-4 border rounded bg-white shadow-sm flex justify-between items-start gap-4';
        div.innerHTML = `
            <div>
                <div class="font-bold text-lg mb-1">${script.name}</div>
                <div class="flex items-center gap-2">
                    <span class="text-gray-600 text-sm">${script.description}</span>
                    ${script.long_description ? `
                    <button class="toggle-long-desc-button text-blue-500 hover:text-blue-700 p-0.5 rounded" data-target-id="long-desc-${script.filename}" title="Mostrar/Ocultar detalles">
                        <svg class="w-4 h-4 chevron-down" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 9l-7 7-7-7"></path>
                        </svg>
                        <svg class="w-4 h-4 chevron-up hidden" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 15l7-7 7 7"></path>
                        </svg>
                    </button>
                    ` : ''}
                </div>
                <div id="long-desc-${script.filename}" class="long-description-content mt-2 border-t pt-2 hidden">
                    ${script.long_description ? (() => { // IIFE to handle the markdown rendering
                        if (typeof window.markdownit === 'undefined') { // Check that markdown-it is loaded
                            console.error("markdown-it library not loaded!");
                            return `<p class="text-red-500">Error: Librería Markdown no cargada.</p><pre>${script.long_description}</pre>`; // Fallback: show raw text
                        }
                        // Create an instance and render
                        const md = window.markdownit();
                        const renderedHtml = md.render(script.long_description);
                        return renderedHtml;
                    })() : ''}
                </div>
            </div>
            <div class="flex items-center gap-2 flex-shrink-0">
                <div class="flex flex-col items-center">
                    <button data-filename="${script.filename}"
                        class="bg-green-500 hover:bg-green-600 text-white px-3 py-1 rounded text-sm w-24 text-center execute-button">
                        Ejecutar
                    </button>
                    <div class="text-xs text-gray-500 mt-1 truncate w-24 text-center" title="${script.filename}">${script.filename}</div>
                </div>
                <button data-group="${group}" data-filename="${script.filename}"
                    class="p-1 rounded text-gray-500 hover:bg-gray-200 hover:text-gray-700 edit-button" title="Editar Detalles">
                    <svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"></path>
                    </svg>
                </button>
            </div>
        `;
        container.appendChild(div);

        // Attach event listeners to the newly created buttons
        const executeButton = div.querySelector('.execute-button');
        executeButton.addEventListener('click', () => {
            executeScript(script.filename);
        });

        const editButton = div.querySelector('.edit-button');
        editButton.addEventListener('click', () => {
            editScriptDetails(group, script.filename);
        });

        // Listener for the long-description toggle button (if present)
        const toggleDescButton = div.querySelector('.toggle-long-desc-button');
        if (toggleDescButton) {
            toggleDescButton.addEventListener('click', (e) => {
                const button = e.currentTarget;
                const targetId = button.dataset.targetId;
                const targetElement = document.getElementById(targetId);
                if (targetElement) {
                    targetElement.classList.toggle('hidden');
                    button.querySelector('.chevron-down').classList.toggle('hidden');
                    button.querySelector('.chevron-up').classList.toggle('hidden');
                }
            });
        }
    });
}

// Execute a script
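The `/api/script-details/<group>/<filename>` endpoint that `editScriptDetails` and `saveScriptDetails` call is not part of this diff. A hypothetical Flask-side sketch of the contract implied by the JavaScript above (the two `config_manager` helpers are assumptions, not the project's actual API):

# Hypothetical sketch of the server-side contract; field names mirror the JS payload.
from flask import request, jsonify

@app.route("/api/script-details/<group>/<script_filename>", methods=["GET", "POST"])
def script_details(group, script_filename):
    if request.method == "GET":
        # Expected JSON shape: display_name, short_description, long_description, hidden
        details = config_manager.get_script_details(group, script_filename)  # assumed helper
        return jsonify(details)
    # POST: persist the edited metadata and report status
    updated = request.get_json() or {}
    config_manager.save_script_details(group, script_filename, updated)  # assumed helper
    return jsonify({"status": "success"})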
@@ -112,7 +259,7 @@ async function executeScript(scriptName) {
    const response = await fetch('/api/execute_script', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
-       body: JSON.stringify({ group: currentGroup, script: scriptName })
+       body: JSON.stringify({ group: currentGroup, script: scriptName }) // scriptName here is the actual filename
    });

    // Check for HTTP errors during the *request* itself
@@ -262,11 +409,9 @@ function generateInputField(def, key, value, level) {
                class="${baseClasses}" data-key="${key}">`;

        case 'boolean':
-           return `<div class="flex items-center">
-               <input type="checkbox" ${value ? 'checked' : ''}
+           return `<input type="checkbox" ${value ? 'checked' : ''}
                class="form-checkbox h-5 w-5 bg-green-50" data-key="${key}">
-           </div>`;
+           `; // <-- add this closing backtick

        default:
            return `<input type="text" value="${value || ''}"
                class="${baseClasses}" data-key="${key}">`;
@@ -1030,8 +1175,8 @@ function fetchLogs() {
    fetch('/api/logs')
        .then(response => response.json())
        .then(data => {
-           const logOutput = document.getElementById('log-output');
-           logOutput.textContent = data.logs || 'No hay logs.';
+           const logOutput = document.getElementById('log-area'); // Corrected ID to log-area
+           logOutput.innerHTML = data.logs || 'No hay logs.'; // Use innerHTML to keep formatting if present
            logOutput.scrollTop = logOutput.scrollHeight; // Scroll to bottom
        })
        .catch(error => console.error('Error fetching logs:', error));
@@ -1043,7 +1188,8 @@ function clearLogs() {
        .then(response => response.json())
        .then(data => {
            if (data.status === 'success') {
-               fetchLogs(); // Refresh logs after clearing
+               // Clear the log area visually right away
+               document.getElementById('log-area').innerHTML = '';
                showToast('Logs borrados correctamente.');
            } else {
                showToast('Error al borrar los logs.', 'error');
@@ -1059,8 +1205,11 @@
// You'll need a showToast function (or similar) if you use one
function showToast(message, type = 'success') {
    // Implement your toast logic here
-   console.log(`Toast (${type}): ${message}`);
-   alert(`Toast (${type}): ${message}`); // Simple alert as a placeholder
+   console.log(`UI (${type}): ${message}`); // Always log to the console
+
+   if (type === 'error') {
+       alert(`Error: ${message}`); // Show an alert only for errors
+   }
}

// Call fetchLogs on page load if needed
@@ -146,7 +146,7 @@
    <!-- Scripts List -->
    <div class="mb-8 bg-white p-6 rounded-lg shadow">
        <h2 class="text-xl font-bold mb-4">Scripts Disponibles</h2>
-       <div id="scripts-list"></div>
+       <div id="scripts-list" class="space-y-4"></div> <!-- Added space-y-4 for vertical spacing -->
    </div>

    <!-- Logs -->
@@ -209,7 +209,50 @@
        </div>
    </div>

    <!-- Script Details Editor Modal -->
    <div id="script-editor-modal" class="hidden fixed inset-0 bg-gray-600 bg-opacity-50 flex items-center justify-center z-50">
        <div class="modal-content bg-white rounded-lg shadow-lg w-full max-w-lg max-h-[90vh] overflow-auto">
            <div class="modal-header sticky top-0 bg-white border-b p-4">
                <div class="flex justify-between items-center">
                    <h3 class="text-xl font-bold">Editar Detalles del Script</h3>
                    <button onclick="closeScriptEditorModal()" class="text-gray-500 hover:text-gray-700">×</button>
                </div>
            </div>
            <div class="p-6 space-y-4">
                <input type="hidden" id="edit-script-group">
                <input type="hidden" id="edit-script-filename">
                <div>
                    <label class="block text-sm font-bold mb-1">Nombre del Archivo</label>
                    <p id="edit-script-filename-display" class="text-sm text-gray-600 bg-gray-100 p-2 rounded border"></p>
                </div>

                <div>
                    <label for="edit-script-display-name" class="block text-sm font-bold mb-2">Nombre a Mostrar</label>
                    <input type="text" id="edit-script-display-name" class="w-full p-2 border rounded">
                </div>
                <div>
                    <label for="edit-script-short-description" class="block text-sm font-bold mb-2">Descripción Corta</label>
                    <input type="text" id="edit-script-short-description" class="w-full p-2 border rounded">
                </div>
                <div>
                    <label for="edit-script-long-description" class="block text-sm font-bold mb-2">Descripción Larga / Ayuda</label>
                    <textarea id="edit-script-long-description" class="w-full p-2 border rounded" rows="5"></textarea>
                    <p class="text-xs text-gray-500 mt-1">Usa Markdown. Doble Enter para párrafo nuevo, dos espacios + Enter para salto de línea simple.</p>
                </div>
                <div class="flex items-center">
                    <input type="checkbox" id="edit-script-hidden" class="form-checkbox h-5 w-5 mr-2">
                    <label for="edit-script-hidden" class="text-sm font-bold">Ocultar script (no se podrá ejecutar desde la UI)</label>
                </div>
            </div>
            <div class="modal-footer sticky bottom-0 bg-white border-t p-4 flex justify-end gap-4">
                <button onclick="closeScriptEditorModal()" class="bg-gray-500 text-white px-4 py-2 rounded">Cancelar</button>
                <button onclick="saveScriptDetails()" class="bg-blue-500 text-white px-4 py-2 rounded">Guardar Cambios</button>
            </div>
        </div>
    </div>

    <!-- Fix the script path -->
    <script src="https://unpkg.com/markdown-it@14.1.0/dist/markdown-it.min.js"></script> <!-- markdown-it library (unpkg) -->
    <script src="{{ url_for('static', filename='js/scripts.js') }}" defer></script>
    <script>
    window.addEventListener('load', () => {