"""
|
|
LadderToSCL - Conversor de Siemens LAD/FUP XML a SCL
|
|
|
|
Este script convierte archivos XML de Siemens TIA Portal (LAD/FUP) a código SCL equivalente.
|
|
Utiliza una arquitectura modular para facilitar el mantenimiento y la extensión.
|
|
|
|
"""
|
|
|
|
# ToUpload/x0_main.py
|
|
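
# Typical invocations (illustrative; actual paths depend on the project layout):
#   python x0_main.py                  -> orchestrator mode: detect every PLC under the
#                                         configured working_directory and re-launch this
#                                         script once per PLC
#   python x0_main.py --plc-dir <dir>  -> internal mode: process a single PLC directory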
import argparse
import subprocess
import os
import sys
import locale
import glob
import time
import traceback
import json
import datetime  # for timestamps
import shutil  # for copying existing .scl files

script_root = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
)
sys.path.append(script_root)
from backend.script_utils import load_configuration

# Import the pipeline stages directly
from x1_to_json import convert_xml_to_json
from x2_process import process_json_to_scl
from x3_generate_scl import generate_scl_or_markdown
from x4_cross_reference import generate_cross_references  # assumes x4_cross_reference.py exposes this function
from x5_aggregate import aggregate_outputs


CONSOLE_ENCODING = "utf-8"

# Import format_variable_name (needed to predict the final output filename)
try:
    current_dir = os.path.dirname(os.path.abspath(__file__))
    if current_dir not in sys.path:
        sys.path.insert(0, current_dir)
    from generators.generator_utils import format_variable_name

    print("INFO: format_variable_name importado desde generators.generator_utils")
except ImportError:
    print(
        "ADVERTENCIA: No se pudo importar format_variable_name desde generators. Usando copia local."
    )
    import re

    # Local fallback: sanitize a block/variable name into a valid identifier
    def format_variable_name(name):
        if not name:
            return "_INVALID_NAME_"
        if name.startswith('"') and name.endswith('"'):
            return name  # quoted names are kept verbatim
        prefix = "#" if name.startswith("#") else ""
        if prefix:
            name = name[1:]
        if name and name[0].isdigit():
            name = "_" + name
        name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
        return prefix + name
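
# Illustrative behavior of the fallback sanitizer (examples trace the code above,
# they are not taken from the original source):
#   format_variable_name('"My DB"')      -> '"My DB"'        (quoted names pass through)
#   format_variable_name("#1Motor.Run")  -> "#_1Motor_Run"   (leading digit guarded, '.' mapped to '_')
#   format_variable_name("")             -> "_INVALID_NAME_"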

# --- Logging helper ---
LOG_FILENAME = "log.txt"  # default name; per-PLC runs use a dynamic log_<plc>.txt instead


def log_message(message, log_file_handle, also_print=True):
    """Write a message to the log file and, optionally, to the console."""
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[
        :-3
    ]  # keep milliseconds
    log_line = f"{timestamp} - {message}"
    try:
        log_file_handle.write(log_line + "\n")
        log_file_handle.flush()  # ensure the line is written immediately
    except Exception as e:
        # Fallback if writing to the log fails
        print(f"{timestamp} - LOGGING ERROR: {e}", file=sys.stderr)
        print(f"{timestamp} - ORIGINAL MSG: {message}", file=sys.stderr)
    if also_print:
        print(message)  # echo the original message to the console
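
# A log line produced by log_message looks like this (timestamp is illustrative):
#   2024-01-31 12:00:00.123 - --- Procesando archivo: PLC/SomeBlock.xml ---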

# --- check_skip_status: decide which pipeline stages can be skipped ---
def check_skip_status(
    xml_filepath, processed_json_filepath, final_output_dir, log_f
):
    status = {"skip_x1_x2": False, "skip_x3": False}
    can_check_x3 = False
    if not os.path.exists(processed_json_filepath):
        return status
    stored_mtime = None
    stored_size = None
    block_name = None
    block_type = None
    processed_json_mtime = None
    try:
        processed_json_mtime = os.path.getmtime(processed_json_filepath)
        with open(processed_json_filepath, "r", encoding="utf-8") as f:
            data = json.load(f)
        stored_mtime = data.get("source_xml_mod_time")
        stored_size = data.get("source_xml_size")
        block_name = data.get("block_name")
        block_type = data.get("block_type")
    except Exception as e:
        log_message(
            f"Advertencia: Error leyendo JSON procesado {processed_json_filepath}: {e}. No se saltará.",
            log_f,
            also_print=False,
        )
        return status

    if stored_mtime is None or stored_size is None:
        can_check_x3 = block_name is not None and block_type is not None
    else:
        try:
            current_xml_mtime = os.path.getmtime(xml_filepath)
            current_xml_size = os.path.getsize(xml_filepath)
            time_match = abs(stored_mtime - current_xml_mtime) < 0.001
            size_match = stored_size == current_xml_size
            if time_match and size_match:
                status["skip_x1_x2"] = True
                can_check_x3 = True
        except OSError as e:
            log_message(
                f"Advertencia: Error obteniendo metadatos XML para {xml_filepath}: {e}. No se saltará x1/x2.",
                log_f,
                also_print=False,
            )
            can_check_x3 = block_name is not None and block_type is not None

    if status["skip_x1_x2"] and can_check_x3:
        try:
            expected_extension = (
                ".md" if block_type in ["PlcUDT", "PlcTagTable"] else ".scl"
            )
            final_filename = format_variable_name(block_name) + expected_extension
            final_output_path = os.path.join(final_output_dir, final_filename)
            if os.path.exists(final_output_path):
                final_output_mtime = os.path.getmtime(final_output_path)
                if final_output_mtime >= processed_json_mtime:
                    status["skip_x3"] = True
        except Exception as e:
            log_message(
                f"Advertencia: Error determinando estado de salto x3 para {block_name or 'desconocido'}: {e}. No se saltará x3.",
                log_f,
                also_print=False,
            )
    return status
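
# Skip logic in brief:
#   skip_x1_x2 -> the processed JSON already records the current XML's mtime/size,
#                 so parsing (x1) and processing (x2) can be skipped
#   skip_x3    -> additionally, the final .scl/.md output exists and is newer than
#                 the processed JSON, so code generation (x3) can be skipped too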

# --- Cleanup function (x7) ---------------------------------------------------------------------------

def clear_generated_outputs(plc_dir: str = None) -> bool:
    """Delete every artifact (JSON, SCL, MD, logs) generated by this script.

    If *plc_dir* is None, behave analogously to orchestrator mode: locate every
    PLC under the configured *working_directory* and clean them one by one.
    Return *True* if the operation finished without critical errors, *False*
    otherwise.
    """
    errors_found = False

    try:
        configs = load_configuration()
        working_directory = configs.get("working_directory")
        if not working_directory or not os.path.isdir(working_directory):
            print("Error: 'working_directory' inválido en la configuración.", file=sys.stderr)
            return False

        xml_parser_config = configs.get("level2", {})
        cfg_scl_output_dirname = xml_parser_config.get("scl_output_dir", "scl_output")
        cfg_xref_output_dirname = xml_parser_config.get("xref_output_dir", "xref_output")
        cfg_aggregated_filename = xml_parser_config.get("aggregated_filename", "full_project_representation.md")

        # Determine the list of PLCs to clean
        if plc_dir is not None:
            plc_dirs = [os.path.abspath(plc_dir)]
            if not os.path.isdir(plc_dirs[0]):
                print(f"Advertencia: El directorio PLC especificado no existe: {plc_dirs[0]}")
                return False
        else:
            plc_dirs = []
            for entry in os.listdir(working_directory):
                cand_path = os.path.join(working_directory, entry)
                if os.path.isdir(cand_path) and glob.glob(os.path.join(cand_path, "**", "*.xml"), recursive=True):
                    plc_dirs.append(cand_path)
            if not plc_dirs:
                plc_dirs = [working_directory]

        total_dirs_removed = 0
        total_files_removed = 0

        for plc_path in plc_dirs:
            plc_path = os.path.abspath(plc_path)
            plc_name_safe = os.path.basename(plc_path.strip(os.sep))
            print(f"\n=== Limpiando PLC: {plc_name_safe} ===")

            # 1) Remove 'parsing' folders (and their JSON contents)
            for parsing_dir in glob.glob(os.path.join(plc_path, "**", "parsing"), recursive=True):
                if os.path.isdir(parsing_dir):
                    try:
                        shutil.rmtree(parsing_dir)
                        print(f" - Eliminado directorio de parsing: {os.path.relpath(parsing_dir, working_directory)}")
                        total_dirs_removed += 1
                    except Exception as e:
                        print(f" - ERROR al eliminar {parsing_dir}: {e}")
                        errors_found = True

            # 2) Remove the SCL and XRef output directories
            for dirname in [cfg_scl_output_dirname, cfg_xref_output_dirname]:
                target_dir = os.path.join(plc_path, dirname)
                if os.path.isdir(target_dir):
                    try:
                        shutil.rmtree(target_dir)
                        print(f" - Eliminado directorio '{dirname}': {os.path.relpath(target_dir, working_directory)}")
                        total_dirs_removed += 1
                    except Exception as e:
                        print(f" - ERROR al eliminar {target_dir}: {e}")
                        errors_found = True

            # 3) Remove the main aggregated file
            agg_file = os.path.join(plc_path, cfg_aggregated_filename)
            if os.path.isfile(agg_file):
                try:
                    os.remove(agg_file)
                    print(f" - Eliminado archivo agregado: {os.path.relpath(agg_file, working_directory)}")
                    total_files_removed += 1
                except Exception as e:
                    print(f" - ERROR al eliminar {agg_file}: {e}")
                    errors_found = True

            # 4) Remove the PLC-specific log
            script_dir = os.path.dirname(os.path.abspath(__file__))
            log_path = os.path.join(script_dir, f"log_{plc_name_safe}.txt")
            if os.path.isfile(log_path):
                try:
                    os.remove(log_path)
                    print(f" - Eliminado log: {os.path.basename(log_path)}")
                    total_files_removed += 1
                except Exception as e:
                    print(f" - ERROR al eliminar {log_path}: {e}")
                    errors_found = True

        print("\n--- Resumen de limpieza ---")
        print(f" Directorios eliminados: {total_dirs_removed}")
        print(f" Archivos eliminados: {total_files_removed}")
        print(" Limpieza completada." if not errors_found else " Limpieza completada con errores.")

        return not errors_found
    except Exception as e:
        print(f"ERROR inesperado durante la limpieza: {e}", file=sys.stderr)
        traceback.print_exc()
        return False
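
# Illustrative calls (the paths are hypothetical):
#   clear_generated_outputs()                         # clean every PLC under working_directory
#   clear_generated_outputs("/projects/Plant/PLC_1")  # clean a single PLC directory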

# --- End cleanup function ----------------------------------------------------------------------------


# --- Main block ---
if __name__ == "__main__":
    # -------------------------------------------------------------------------
    # 1. Parse command-line arguments
    #    --plc-dir : path of the PLC to process directly (internal mode)
    #    Without this flag the script acts as an "orchestrator": it detects every
    #    PLC under the working_directory and re-launches itself for each of them.
    # -------------------------------------------------------------------------
    arg_parser = argparse.ArgumentParser(description="Convertidor XML→SCL (multi-PLC)")
    arg_parser.add_argument("--plc-dir", dest="plc_dir", help="Ruta del PLC a procesar (uso interno).", default=None)
    cli_args, _ = arg_parser.parse_known_args()

    # Load the configuration
    configs = load_configuration()
    working_directory = configs.get("working_directory")

    # -------------------------------------------------------------------------
    # 2. No --plc-dir given => ORCHESTRATOR mode
    #    Detect every PLC (subdirectory with at least one .xml) and launch this
    #    same script for each one with the --plc-dir flag.
    # -------------------------------------------------------------------------
    if cli_args.plc_dir is None:
        if not working_directory or not os.path.isdir(working_directory):
            print("Error: 'working_directory' inválido en la configuración.", file=sys.stderr)
            sys.exit(1)

        # Detect PLCs as subdirectories containing at least one XML file
        detected_plc_dirs = []
        for entry in os.listdir(working_directory):
            cand_path = os.path.join(working_directory, entry)
            if os.path.isdir(cand_path):
                if glob.glob(os.path.join(cand_path, "**", "*.xml"), recursive=True):
                    detected_plc_dirs.append(cand_path)

        # No PLC found: perhaps the working_directory itself IS the PLC
        if not detected_plc_dirs:
            detected_plc_dirs = [working_directory]

        # Run this script sequentially for each PLC
        overall_exit_code = 0
        for plc_dir in detected_plc_dirs:
            print(f"\n=== Lanzando procesamiento para PLC: {os.path.basename(plc_dir)} ===")
            ret = subprocess.call([sys.executable, os.path.abspath(__file__), "--plc-dir", plc_dir])
            if ret != 0:
                overall_exit_code = 1  # record a global failure if any PLC fails

        sys.exit(overall_exit_code)
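
    # Note: each PLC runs in a separate interpreter process (subprocess.call above),
    # so a crash while processing one PLC cannot affect the others; only the exit
    # code is propagated back to the orchestrator.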

    # -------------------------------------------------------------------------
    # 3. INTERNAL mode (--plc-dir given) => process only that PLC
    # -------------------------------------------------------------------------
    xml_project_dir = os.path.abspath(cli_args.plc_dir)
    if not os.path.isdir(xml_project_dir):
        print(f"Error: El directorio PLC especificado no existe: {xml_project_dir}", file=sys.stderr)
        sys.exit(1)

    # Use the PLC name to differentiate the logs
    plc_name_safe = os.path.basename(xml_project_dir.strip(os.sep))

    # ---------------------------------------------------------------------
    # 3.1 Read the group-specific parameters for reuse further below
    # ---------------------------------------------------------------------
    xml_parser_config = configs.get("level2", {})

    cfg_scl_output_dirname = xml_parser_config.get("scl_output_dir", "scl_output")
    cfg_xref_output_dirname = xml_parser_config.get("xref_output_dir", "xref_output")
    cfg_xref_source_subdir = xml_parser_config.get("xref_source_subdir", "source")
    cfg_call_xref_filename = xml_parser_config.get("call_xref_filename", "xref_calls_tree.md")
    cfg_db_usage_xref_filename = xml_parser_config.get("db_usage_xref_filename", "xref_db_usage_summary.md")
    cfg_plc_tag_xref_filename = xml_parser_config.get("plc_tag_xref_filename", "xref_plc_tags_summary.md")

    # Integer conversion with error handling
    try:
        cfg_max_call_depth = int(xml_parser_config.get("max_call_depth", 5))
    except (ValueError, TypeError):
        print("Advertencia: Valor inválido para 'max_call_depth' en la configuración. Usando valor por defecto 5.", file=sys.stderr)
        cfg_max_call_depth = 5

    try:
        cfg_max_users_list = int(xml_parser_config.get("max_users_list", 20))
    except (ValueError, TypeError):
        print("Advertencia: Valor inválido para 'max_users_list' en la configuración. Usando valor por defecto 20.", file=sys.stderr)
        cfg_max_users_list = 20

    cfg_aggregated_filename = xml_parser_config.get("aggregated_filename", "full_project_representation.md")
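
    # Shape of the configuration consumed above (illustrative; the keys and defaults
    # mirror the .get() calls, but the actual format depends on load_configuration):
    #   {
    #     "working_directory": "/path/to/project",
    #     "level2": {
    #       "scl_output_dir": "scl_output",
    #       "xref_output_dir": "xref_output",
    #       "xref_source_subdir": "source",
    #       "max_call_depth": 5,
    #       "max_users_list": 20,
    #       "aggregated_filename": "full_project_representation.md"
    #     }
    #   }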

    # Build a PLC-specific log name
    log_filename_dynamic = f"log_{plc_name_safe}.txt"
    log_filepath = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), log_filename_dynamic
    )

    # Directory containing this script (x0_main.py)
    script_dir = os.path.dirname(os.path.abspath(__file__))

    # Open the log file ("w" truncates, so each run starts a fresh log)
    with open(
        log_filepath, "w", encoding="utf-8"
    ) as log_f:
        log_message("=" * 40 + " LOG START " + "=" * 40, log_f)

        # --- PART 1: FIND FILES ---
        # Work exclusively inside the given PLC.
        log_message(
            f"Directorio de trabajo base configurado: '{working_directory}'", log_f
        )
        log_message(
            f"Buscando archivos XML recursivamente en: '{xml_project_dir}'", log_f
        )

        # Search pattern covering the whole PLC
        search_pattern = os.path.join(xml_project_dir, "**", "*.xml")
        xml_files_found = glob.glob(search_pattern, recursive=True)
        if not xml_files_found:
            log_message(
                f"No se encontraron archivos XML en '{xml_project_dir}' o sus subdirectorios.",
                log_f,
            )
            sys.exit(0)
        log_message(
            f"Se encontraron {len(xml_files_found)} archivos XML para procesar:", log_f
        )
        xml_files_found.sort()
        for xml_file in xml_files_found:
            # Show paths relative to the original working_directory
            log_message(f" - {os.path.relpath(xml_file, working_directory)}", log_f)

        # --- Phase 0.5: identify native .scl blocks ---
        log_message("\n--- Fase 0.5: Identificando archivos .scl nativos existentes ---", log_f)
        native_scl_blocks = set()
        try:
            # Use a pattern similar to Phase 1.5 to find SCL files in the source project
            search_scl_pattern_native = os.path.join(xml_project_dir, "**", "*.scl")
            existing_scl_files_native = glob.glob(search_scl_pattern_native, recursive=True)

            # Exclude the output directories to avoid self-references when nested
            scl_output_dir_abs_native = os.path.abspath(os.path.join(xml_project_dir, cfg_scl_output_dirname))
            xref_output_dir_abs_native = os.path.abspath(os.path.join(xml_project_dir, cfg_xref_output_dirname))

            for scl_file_path in existing_scl_files_native:
                if not os.path.abspath(os.path.dirname(scl_file_path)).startswith(scl_output_dir_abs_native) and \
                   not os.path.abspath(os.path.dirname(scl_file_path)).startswith(xref_output_dir_abs_native):
                    base_name = os.path.splitext(os.path.basename(scl_file_path))[0]
                    native_scl_blocks.add(base_name)
            log_message(f"Se identificaron {len(native_scl_blocks)} posibles bloques SCL nativos (con archivo .scl).", log_f)
        except Exception as e:
            log_message(f"Error durante la identificación de SCL nativos: {e}. Se continuará sin priorización.", log_f)
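
        # The names collected above are consulted in the per-file loop below: any XML
        # whose base name matches a native .scl file is skipped entirely, so the
        # hand-written SCL takes priority over regeneration from XML.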

        # --- Output directories ---
        # These are created INSIDE xml_project_dir (i.e. inside the PLC folder)
        scl_output_dir = os.path.join(xml_project_dir, cfg_scl_output_dirname)
        xref_output_dir = os.path.join(xml_project_dir, cfg_xref_output_dirname)
        # Ensure the output directories exist
        os.makedirs(scl_output_dir, exist_ok=True)
        os.makedirs(xref_output_dir, exist_ok=True)

        # --- PART 2: INDIVIDUAL PROCESSING (x1, x2, x3) ---
        log_message("\n--- Fase 1: Procesamiento Individual (x1, x2, x3) ---", log_f)
        processed_count = 0
        skipped_full_count = 0
        failed_count = 0
        skipped_partial_count = 0
        skipped_for_native_scl = 0  # counter for XMLs shadowed by native SCL files

        for i, xml_filepath in enumerate(xml_files_found):
            relative_path = os.path.relpath(xml_filepath, working_directory)
            log_message(f"\n--- Procesando archivo: {relative_path} ---", log_f)

            base_filename = os.path.splitext(os.path.basename(xml_filepath))[0]
            parsing_dir = os.path.join(os.path.dirname(xml_filepath), "parsing")
            # Create the parsing directory if it does not exist
            os.makedirs(parsing_dir, exist_ok=True)
            json_output_file = os.path.join(parsing_dir, f"{base_filename}.json")
            processed_json_filepath = os.path.join(
                parsing_dir, f"{base_filename}_processed.json"
            )

            # Check whether a native SCL file shadows this block
            if base_filename in native_scl_blocks:
                log_message(
                    f"--- SALTANDO PROCESAMIENTO XML (x1, x2, x3) para: {relative_path}. Se usará el archivo .scl original existente. ---",
                    log_f,
                )
                skipped_for_native_scl += 1
                continue  # move on to the next XML file

            # 1. Check the skip status
            skip_info = check_skip_status(
                xml_filepath, processed_json_filepath, scl_output_dir, log_f
            )
            skip_x1_x2 = skip_info["skip_x1_x2"]
            skip_x3 = skip_info["skip_x3"]

            # If everything can be skipped, record it and continue
            if skip_x1_x2 and skip_x3:
                log_message(
                    f"--- SALTANDO TODO (x1, x2, x3) para: {relative_path} (XML no modificado, salida final actualizada)",
                    log_f,
                )
                skipped_full_count += 1
                processed_count += 1  # a full skip still counts as processed
                continue

            # Use try/except to catch errors in the direct function calls
            try:
                # 2. Run/skip x1 (convert_xml_to_json)
                if skip_x1_x2:
                    log_message(
                        f"--- SALTANDO x1 para: {relative_path} (XML no modificado, JSON procesado existe)",
                        log_f,
                    )
                    success_x1 = True  # assume success when skipping
                else:
                    log_message(
                        f"--- Ejecutando x1 (convert_xml_to_json) para: {relative_path} ---", log_f
                    )
                    success_x1 = convert_xml_to_json(xml_filepath, json_output_file)
                    if not success_x1:
                        # The function itself already prints the error
                        log_message(f"--- x1 FALLÓ para: {relative_path} ---", log_f, also_print=False)

                if not success_x1:
                    failed_count += 1
                    continue  # do not continue if x1 failed

                # 3. Run/skip x2 (process_json_to_scl)
                if skip_x1_x2:  # skipping x1 implies skipping x2
                    log_message(
                        f"--- SALTANDO x2 para: {relative_path} (razón anterior)", log_f
                    )
                    success_x2 = True  # assume success when skipping
                else:
                    log_message(
                        f"--- Ejecutando x2 (process_json_to_scl) para: {relative_path} ---", log_f
                    )
                    success_x2 = process_json_to_scl(json_output_file, processed_json_filepath)
                    if not success_x2:
                        log_message(f"--- x2 FALLÓ para: {relative_path} ---", log_f, also_print=False)

                if not success_x2:
                    failed_count += 1
                    continue  # do not continue if x2 failed

                # 4. Run x3 (generate_scl_or_markdown). skip_x3 was handled above,
                #    so reaching this point means x3 MUST run (skip_x3 was False).
                if skip_x1_x2:
                    skipped_partial_count += 1  # x1/x2 skipped but x3 runs

                log_message(
                    f"--- Ejecutando x3 (generate_scl_or_markdown) para: {relative_path} ---", log_f
                )
                # Make sure the final output directory exists BEFORE calling the function
                os.makedirs(scl_output_dir, exist_ok=True)
                success_x3 = generate_scl_or_markdown(
                    processed_json_filepath, scl_output_dir, xml_project_dir
                )
                if not success_x3:
                    log_message(f"--- x3 FALLÓ para: {relative_path} ---", log_f, also_print=False)
                    failed_count += 1
                    continue  # do not continue if x3 failed

                # Everything went well
                processed_count += 1

            except Exception as e:
                # Catch any unexpected error during the function calls
                log_message(f"--- ERROR INESPERADO procesando {relative_path}: {e} ---", log_f, also_print=False)
                print(f"--- ERROR INESPERADO procesando {relative_path}: {e} ---", file=sys.stderr)
                traceback_str = traceback.format_exc()
                log_message(traceback_str, log_f, also_print=False)  # log the traceback
                traceback.print_exc(file=sys.stderr)  # show the traceback on the console
                failed_count += 1
                continue  # move on to the next file

        # --- Phase 1.5: copy existing SCL files ---
        log_message(f"\n--- Fase 1.5: Copiando archivos SCL existentes desde '{xml_project_dir}' a '{scl_output_dir}' ---", log_f)
        copied_scl_count = 0
        skipped_scl_count = 0
        try:
            search_scl_pattern = os.path.join(xml_project_dir, "**", "*.scl")
            existing_scl_files = glob.glob(search_scl_pattern, recursive=True)

            # Exclude files already inside scl_output_dir or xref_output_dir to avoid self-copying when nested
            scl_output_dir_abs = os.path.abspath(scl_output_dir)
            xref_output_dir_abs = os.path.abspath(xref_output_dir)

            filtered_scl_files = [
                f for f in existing_scl_files
                if not os.path.abspath(os.path.dirname(f)).startswith(scl_output_dir_abs)
                and not os.path.abspath(os.path.dirname(f)).startswith(xref_output_dir_abs)
            ]

            if not filtered_scl_files:
                log_message("No se encontraron archivos .scl existentes para copiar (excluyendo directorios de salida).", log_f)
            else:
                log_message(f"Se encontraron {len(filtered_scl_files)} archivos .scl existentes para copiar:", log_f)
                for src_scl_path in filtered_scl_files:
                    relative_scl_path = os.path.relpath(src_scl_path, xml_project_dir)
                    dest_scl_path = os.path.join(scl_output_dir, os.path.basename(src_scl_path))  # copy straight into the PLC's scl_output

                    # Check whether a file with the same name was already generated from XML
                    if os.path.exists(dest_scl_path):
                        log_message(f" - Omitiendo copia de '{relative_scl_path}': Ya existe un archivo generado con el mismo nombre en el destino.", log_f, also_print=False)
                        skipped_scl_count += 1
                    else:
                        try:
                            log_message(f" - Copiando '{relative_scl_path}' a '{os.path.relpath(dest_scl_path, working_directory)}'", log_f, also_print=False)
                            shutil.copy2(src_scl_path, dest_scl_path)  # copy2 preserves metadata
                            copied_scl_count += 1
                        except Exception as copy_err:
                            log_message(f" - ERROR copiando '{relative_scl_path}': {copy_err}", log_f)
                            # TODO: decide whether this should count as a general failure
            log_message(f"Copia de SCL existentes finalizada. Copiados: {copied_scl_count}, Omitidos (conflicto nombre): {skipped_scl_count}", log_f)

        except Exception as e:
            log_message(f"Error durante la Fase 1.5 (Copia SCL): {e}", log_f)

        # --- PART 3: RUN x4 (cross references) ---
        log_message(
            f"\n--- Fase 2: Ejecutando x4_cross_reference.py (salida en '{cfg_xref_output_dirname}/') ---",
            log_f,
        )
        run_x4 = True
        success_x4 = False
        # x4 should run only if at least some file survived x1/x2. A precise check
        # would look for existing _processed.json files; as a simple approximation,
        # run x4 unless every single file failed.
        can_run_x4 = failed_count < len(xml_files_found)
        if not can_run_x4 and len(xml_files_found) > 0:
            log_message(
                "Advertencia: Todos los archivos fallaron en x1/x2. Saltando x4.", log_f
            )
            run_x4 = False
        elif len(xml_files_found) == 0:
            run_x4 = False  # nothing to process

        if run_x4:
            log_message(
                f"Ejecutando x4 (generate_cross_references) sobre: {xml_project_dir}, salida en: {xref_output_dir}",
                log_f,
            )
            try:
                # Direct call to x4, passing every parameter read from the configuration
                success_x4 = generate_cross_references(
                    xml_project_dir,
                    xref_output_dir,
                    cfg_scl_output_dirname,
                    cfg_xref_source_subdir,
                    cfg_call_xref_filename,
                    cfg_db_usage_xref_filename,
                    cfg_plc_tag_xref_filename,
                    cfg_max_call_depth,
                    cfg_max_users_list,
                )
                if not success_x4:
                    # The callee should already have printed/logged the specific error
                    log_message("--- x4 (generate_cross_references) FALLÓ. ---", log_f, also_print=False)
            except Exception as e:
                # Catch unexpected errors in the x4 call
                log_message(f"--- ERROR INESPERADO ejecutando x4: {e} ---", log_f, also_print=False)
                print(f"--- ERROR INESPERADO ejecutando x4: {e} ---", file=sys.stderr)
                traceback_str = traceback.format_exc()
                log_message(traceback_str, log_f, also_print=False)
                traceback.print_exc(file=sys.stderr)
                success_x4 = False  # mark as failed
        else:
            log_message("Fase 2 (x4) omitida.", log_f)

        # --- PART 4: RUN x5 (aggregation) ---
        log_message(
            f"\n--- Fase 3: Ejecutando x5_aggregate.py (salida en '{cfg_aggregated_filename}') ---",
            log_f,
        )
        run_x5 = True
        success_x5 = False
        # Same condition as x4: run unless everything failed in x1/x2/x3
        can_run_x5 = failed_count < len(xml_files_found)
        if not can_run_x5 and len(xml_files_found) > 0:
            log_message(
                "Advertencia: Todos los archivos fallaron en x1/x2/x3. Saltando x5.", log_f
            )
            run_x5 = False
        elif len(xml_files_found) == 0:
            run_x5 = False

        if run_x5:
            # The aggregated file is stored inside the PLC to keep outputs separate
            output_agg_file = os.path.join(xml_project_dir, cfg_aggregated_filename)
            log_message(
                f"Ejecutando x5 (aggregate_outputs) sobre: {xml_project_dir}, salida agregada en: {output_agg_file}",
                log_f,
            )
            try:
                # Direct call to x5 with the parameters read from the configuration
                success_x5 = aggregate_outputs(
                    xml_project_dir,
                    output_agg_file,
                    cfg_scl_output_dirname,
                    cfg_xref_output_dirname,
                )
                if not success_x5:
                    # The callee should already have printed/logged the specific error
                    log_message("--- x5 (aggregate_outputs) FALLÓ. ---", log_f, also_print=False)
            except Exception as e:
                # Catch unexpected errors in the x5 call
                log_message(f"--- ERROR INESPERADO ejecutando x5: {e} ---", log_f, also_print=False)
                print(f"--- ERROR INESPERADO ejecutando x5: {e} ---", file=sys.stderr)
                traceback_str = traceback.format_exc()
                log_message(traceback_str, log_f, also_print=False)
                traceback.print_exc(file=sys.stderr)
                success_x5 = False  # mark as failed
        else:
            log_message("Fase 3 (x5) omitida.", log_f)

        # --- PART 5: FINAL SUMMARY ---
        log_message(
            "\n" + "-" * 20 + " Resumen Final del Procesamiento Completo " + "-" * 20,
            log_f,
        )
        log_message(f"Total de archivos XML encontrados: {len(xml_files_found)}", log_f)
        log_message(
            f"Archivos procesados/actualizados exitosamente (x1-x3): {processed_count}",
            log_f,
        )
        log_message(
            f"Archivos completamente saltados (x1, x2, x3): {skipped_full_count}", log_f
        )
        log_message(
            f"Archivos parcialmente saltados (x1, x2 saltados; x3 ejecutado): {skipped_partial_count}",
            log_f,
        )
        log_message(f"Archivos fallidos (en x1, x2, x3 o error inesperado): {failed_count}", log_f)
        log_message(
            f"Archivos XML omitidos (priorizando .scl nativo): {skipped_for_native_scl}",
            log_f,
        )
        log_message(f"Archivos SCL existentes copiados (Fase 1.5): {copied_scl_count}", log_f)
        log_message(f"Archivos SCL existentes omitidos por conflicto (Fase 1.5): {skipped_scl_count}", log_f)
        log_message(
            f"Fase 2 (Generación XRef - x4): {'Completada' if run_x4 and success_x4 else ('Fallida' if run_x4 and not success_x4 else 'Omitida')}",
            log_f,
        )
        log_message(
            f"Fase 3 (Agregación - x5): {'Completada' if run_x5 and success_x5 else ('Fallida' if run_x5 and not success_x5 else 'Omitida')}",
            log_f,
        )
        log_message("-" * 80, log_f)

        has_errors = (
            failed_count > 0
            or (run_x4 and not success_x4)
            or (run_x5 and not success_x5)
        )

        # Final console message
        final_console_message = "Proceso finalizado exitosamente."
        exit_code = 0
        if has_errors:
            final_console_message = "Proceso finalizado con errores."
            exit_code = 1

        log_message(final_console_message, log_f)  # log the final message
        print(
            f"\n{final_console_message} Consulta '{log_filename_dynamic}' para detalles."
        )
        log_message("=" * 41 + " LOG END " + "=" * 42, log_f)

        # Explicit flush before exiting
        try:
            log_f.flush()
            os.fsync(log_f.fileno())  # try to force the write to disk (may not work on every OS)
        except Exception as flush_err:
            print(f"Advertencia: Error durante flush/fsync final del log: {flush_err}", file=sys.stderr)

    # Exit with the appropriate code (the final message was already printed above)
    sys.exit(exit_code)