# -*- coding: utf-8 -*- import json import argparse import os import copy import traceback import re import importlib import sys import sympy # Import sympy # Import necessary components from processors directory from processors.processor_utils import ( format_variable_name, # Keep if used outside processors sympy_expr_to_scl, # Needed for IF grouping and maybe others # get_target_scl_name might be used here? Unlikely. ) from processors.symbol_manager import SymbolManager # Import the manager # --- Constantes y Configuración --- # SCL_SUFFIX = "_scl" # Old suffix SCL_SUFFIX = "_sympy_processed" # New suffix to indicate processing method GROUPED_COMMENT = "// Logic included in grouped IF" SIMPLIFIED_IF_COMMENT = "// Simplified IF condition by script" # May still be useful # Global data dictionary (consider passing 'data' as argument if needed elsewhere) # It's currently used by process_group_ifs implicitly via the outer scope, # which works but passing it explicitly might be cleaner. data = {} def process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data): """ Busca condiciones (ya procesadas -> tienen expr SymPy en sympy_map) y, si habilitan un grupo (>1) de bloques funcionales (con SCL ya generado), construye el bloque IF agrupado CON LA CONDICIÓN SIMPLIFICADA. Modifica el campo 'scl' de la instrucción generadora de condición. (Esta es la implementación de la función como la tenías en el archivo original) """ instr_uid = instruction["instruction_uid"] instr_type_original = instruction.get("type", "").replace(SCL_SUFFIX, "").replace("_error", "") made_change = False # Check if this instruction *could* generate a condition suitable for grouping # It must have been processed by the new SymPy method if ( not instruction.get("type", "").endswith(SCL_SUFFIX) # Check if processed by new method or "_error" in instruction.get("type", "") or instruction.get("grouped", False) or instr_type_original not in [ # Original types that produce boolean results "Contact", "O", "Eq", "Ne", "Gt", "Lt", "Ge", "Le", "PBox", "NBox", "And", "Xor", "Not" # Add others like comparison ] ): return False # Avoid reagruping if SCL already contains a complex IF (less likely now) current_scl = instruction.get("scl", "") if current_scl.strip().startswith("IF") and "END_IF;" in current_scl and GROUPED_COMMENT not in current_scl: return False # *** Get the SymPy expression for the condition *** map_key_out = (network_id, instr_uid, "out") sympy_condition_expr = sympy_map.get(map_key_out) # No SymPy expression found or trivial conditions if sympy_condition_expr is None or sympy_condition_expr in [sympy.true, sympy.false]: return False # --- Find consumer instructions (logic similar to before) --- grouped_instructions_cores = [] consumer_instr_list = [] network_logic = next((net["logic"] for net in data["networks"] if net["id"] == network_id), []) if not network_logic: return False groupable_types = [ # Types whose *final SCL* we want to group "Move", "Add", "Sub", "Mul", "Div", "Mod", "Convert", "Call_FC", "Call_FB", # Assuming these generate final SCL in their processors now # SCoil/RCoil might also be groupable if their SCL is final assignment "SCoil", "RCoil" ] for consumer_instr in network_logic: consumer_uid = consumer_instr["instruction_uid"] if consumer_instr.get("grouped", False) or consumer_uid == instr_uid: continue consumer_en = consumer_instr.get("inputs", {}).get("en") consumer_type = consumer_instr.get("type", "") # Current type suffix matters consumer_type_original = consumer_type.replace(SCL_SUFFIX, "").replace("_error", "") is_enabled_by_us = False if ( isinstance(consumer_en, dict) and consumer_en.get("type") == "connection" and consumer_en.get("source_instruction_uid") == instr_uid and consumer_en.get("source_pin") == "out"): is_enabled_by_us = True # Check if consumer is groupable AND has its final SCL generated # The suffix check needs adjustment based on how terminating processors set it. # Assuming processors like Move, Add, Call, SCoil, RCoil NOW generate final SCL and add a suffix. if ( is_enabled_by_us and consumer_type.endswith(SCL_SUFFIX) and # Or a specific "final_scl" suffix consumer_type_original in groupable_types ): consumer_scl = consumer_instr.get("scl", "") # Extract core SCL (logic is similar, maybe simpler if SCL is cleaner now) core_scl = None if consumer_scl: # If consumer SCL itself is an IF generated by EN, take the body if consumer_scl.strip().startswith("IF"): match = re.search(r"THEN\s*(.*?)\s*END_IF;", consumer_scl, re.DOTALL | re.IGNORECASE) core_scl = match.group(1).strip() if match else None elif not consumer_scl.strip().startswith("//"): # Otherwise, take the whole line if not comment core_scl = consumer_scl.strip() if core_scl: grouped_instructions_cores.append(core_scl) consumer_instr_list.append(consumer_instr) # --- If groupable consumers found --- if len(grouped_instructions_cores) > 1: print(f"INFO: Agrupando {len(grouped_instructions_cores)} instr. bajo condición de {instr_type_original} UID {instr_uid}") # *** Simplify the SymPy condition *** try: #simplified_expr = sympy.simplify_logic(sympy_condition_expr, force=True) simplified_expr = sympy.logic.boolalg.to_dnf(sympy_condition_expr, simplify=True) except Exception as e: print(f"Error simplifying condition for grouping UID {instr_uid}: {e}") simplified_expr = sympy_condition_expr # Fallback # *** Convert simplified condition to SCL string *** condition_scl_simplified = sympy_expr_to_scl(simplified_expr, symbol_manager) # *** Build the grouped IF SCL *** scl_grouped_lines = [f"IF {condition_scl_simplified} THEN"] for core_line in grouped_instructions_cores: indented_core = "\n".join([f" {line.strip()}" for line in core_line.splitlines()]) scl_grouped_lines.append(indented_core) scl_grouped_lines.append("END_IF;") final_grouped_scl = "\n".join(scl_grouped_lines) # Update the generator instruction's SCL instruction["scl"] = final_grouped_scl # Mark consumers as grouped for consumer_instr in consumer_instr_list: consumer_instr["scl"] = f"{GROUPED_COMMENT} (by UID {instr_uid})" consumer_instr["grouped"] = True made_change = True return made_change def load_processors(processors_dir="processors"): """ Escanea el directorio, importa módulos, construye el mapa y una lista ordenada por prioridad. """ processor_map = {} processor_list_unsorted = [] # Lista para guardar (priority, type_name, func) default_priority = 10 # Prioridad si no se define en get_processor_info if not os.path.isdir(processors_dir): print(f"Error: Directorio de procesadores no encontrado: '{processors_dir}'") return processor_map, [] # Devuelve mapa vacío y lista vacía print(f"Cargando procesadores desde: '{processors_dir}'") processors_package = os.path.basename(processors_dir) for filename in os.listdir(processors_dir): if filename.startswith("process_") and filename.endswith(".py"): module_name_rel = filename[:-3] full_module_name = f"{processors_package}.{module_name_rel}" try: module = importlib.import_module(full_module_name) if hasattr(module, 'get_processor_info') and callable(module.get_processor_info): processor_info = module.get_processor_info() info_list = [] if isinstance(processor_info, dict): info_list = [processor_info] elif isinstance(processor_info, list): info_list = processor_info else: print(f" Advertencia: get_processor_info en {full_module_name} devolvió tipo inesperado. Se ignora.") continue for info in info_list: if isinstance(info, dict) and 'type_name' in info and 'processor_func' in info: type_name = info['type_name'].lower() processor_func = info['processor_func'] # Obtener prioridad, usar default si no existe priority = info.get('priority', default_priority) if callable(processor_func): if type_name in processor_map: print(f" Advertencia: '{type_name}' en {full_module_name} sobrescribe definición anterior.") processor_map[type_name] = processor_func # Añadir a la lista para ordenar processor_list_unsorted.append({'priority': priority, 'type_name': type_name, 'func': processor_func}) print(f" - Cargado '{type_name}' (Prio: {priority}) desde {module_name_rel}.py") else: print(f" Advertencia: 'processor_func' para '{type_name}' en {full_module_name} no es callable.") else: print(f" Advertencia: Entrada inválida en {full_module_name}: {info}") else: print(f" Advertencia: Módulo {module_name_rel}.py no tiene 'get_processor_info'.") except ImportError as e: print(f"Error importando {full_module_name}: {e}") except Exception as e: print(f"Error procesando {full_module_name}: {e}") traceback.print_exc() # Ordenar la lista por prioridad (menor primero) processor_list_sorted = sorted(processor_list_unsorted, key=lambda x: x['priority']) print(f"\nTotal de tipos de procesadores cargados: {len(processor_map)}") print(f"Orden de procesamiento por prioridad: {[item['type_name'] for item in processor_list_sorted]}") # Devolver el mapa (para lookup rápido si es necesario) y la lista ordenada return processor_map, processor_list_sorted # --- Bucle Principal de Procesamiento (Modificado para STL) --- def process_json_to_scl(json_filepath): """ Lee el JSON simplificado, aplica los procesadores dinámicamente cargados siguiendo un orden de prioridad (ignorando redes STL), y guarda el JSON procesado. """ global data # Necesario para que load_processors y process_group_ifs (definidas fuera) puedan acceder a ella. # Considerar pasar 'data' como argumento si es posible refactorizar. if not os.path.exists(json_filepath): print(f"Error: JSON no encontrado: {json_filepath}") return print(f"Cargando JSON desde: {json_filepath}") try: with open(json_filepath, "r", encoding="utf-8") as f: data = json.load(f) # Carga en 'data' global except Exception as e: print(f"Error al cargar JSON: {e}") traceback.print_exc() return # --- Carga dinámica de procesadores --- script_dir = os.path.dirname(__file__) processors_dir_path = os.path.join(script_dir, 'processors') processor_map, sorted_processors = load_processors(processors_dir_path) if not processor_map: print("Error crítico: No se cargaron procesadores. Abortando.") return # --- Crear mapas de acceso por red --- network_access_maps = {} # (La lógica para llenar network_access_maps no cambia, puedes copiarla de tu original) for network in data.get("networks", []): net_id = network["id"] current_access_map = {} for instr in network.get("logic", []): for _, source in instr.get("inputs", {}).items(): sources_to_check = (source if isinstance(source, list) else ([source] if isinstance(source, dict) else [])) for src in sources_to_check: if (isinstance(src, dict) and src.get("uid") and src.get("type") in ["variable", "constant"]): current_access_map[src["uid"]] = src for _, dest_list in instr.get("outputs", {}).items(): if isinstance(dest_list, list): for dest in dest_list: if (isinstance(dest, dict) and dest.get("uid") and dest.get("type") in ["variable", "constant"]): current_access_map[dest["uid"]] = dest network_access_maps[net_id] = current_access_map # --- Inicializar mapa SymPy y SymbolManager --- symbol_manager = SymbolManager() sympy_map = {} max_passes = 30 passes = 0 processing_complete = False print("\n--- Iniciando Bucle de Procesamiento Iterativo (con SymPy y prioridad) ---") while passes < max_passes and not processing_complete: passes += 1 made_change_in_base_pass = False made_change_in_group_pass = False print(f"\n--- Pase {passes} ---") num_sympy_processed_this_pass = 0 num_grouped_this_pass = 0 # --- FASE 1: Procesadores Base (Ignorando STL) --- print(f" Fase 1 (SymPy Base - Orden por Prioridad):") num_sympy_processed_this_pass = 0 for processor_info in sorted_processors: current_type_name = processor_info['type_name'] func_to_call = processor_info['func'] for network in data.get("networks", []): network_id = network["id"] network_lang = network.get("language", "LAD") # Obtener lenguaje de la red # *** IGNORAR REDES STL EN ESTA FASE *** if network_lang == "STL": continue # Saltar al siguiente network access_map = network_access_maps.get(network_id, {}) network_logic = network.get("logic", []) for instruction in network_logic: instr_uid = instruction.get("instruction_uid") instr_type_original = instruction.get("type", "Unknown") # Saltar si ya procesado, error, agrupado o es chunk STL/SCL/Unsupported if (instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original or instruction.get("grouped", False) or instr_type_original in ["RAW_STL_CHUNK", "RAW_SCL_CHUNK", "UNSUPPORTED_LANG"]): continue # Determinar tipo efectivo (como antes) lookup_key = instr_type_original.lower() effective_type_name = lookup_key if instr_type_original == "Call": block_type = instruction.get("block_type", "").upper() if block_type == "FC": effective_type_name = "call_fc" elif block_type == "FB": effective_type_name = "call_fb" # Llamar al procesador si coincide el tipo if effective_type_name == current_type_name: try: # Pasa sympy_map, symbol_manager y data changed = func_to_call(instruction, network_id, sympy_map, symbol_manager, data) if changed: made_change_in_base_pass = True num_sympy_processed_this_pass += 1 except Exception as e: print(f"ERROR(SymPy Base) al procesar {instr_type_original} UID {instr_uid}: {e}") traceback.print_exc() instruction["scl"] = f"// ERROR en SymPy procesador base: {e}" instruction["type"] = instr_type_original + "_error" made_change_in_base_pass = True # Marcar cambio aunque sea error print(f" -> {num_sympy_processed_this_pass} instrucciones (no STL) procesadas con SymPy.") # --- FASE 2: Agrupación IF (Ignorando STL) --- if made_change_in_base_pass or passes == 1: print(f" Fase 2 (Agrupación IF con Simplificación):") num_grouped_this_pass = 0 for network in data.get("networks", []): network_id = network["id"] network_lang = network.get("language", "LAD") # Obtener lenguaje # *** IGNORAR REDES STL EN ESTA FASE *** if network_lang == "STL": continue # Saltar red STL network_logic = network.get("logic", []) for instruction in network_logic: try: # Llama a process_group_ifs (que necesita acceso a 'data' global o pasado) group_changed = process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data) if group_changed: made_change_in_group_pass = True num_grouped_this_pass += 1 except Exception as e: print(f"ERROR(GroupLoop) al intentar agrupar desde UID {instruction.get('instruction_uid')}: {e}") traceback.print_exc() print(f" -> {num_grouped_this_pass} agrupaciones realizadas (en redes no STL).") # --- Comprobar si se completó el procesamiento --- if not made_change_in_base_pass and not made_change_in_group_pass: print(f"\n--- No se hicieron más cambios en el pase {passes}. Proceso iterativo completado. ---") processing_complete = True else: print(f"--- Fin Pase {passes}: {num_sympy_processed_this_pass} proc SymPy, {num_grouped_this_pass} agrup. Continuando...") # --- Comprobar límite de pases --- if passes == max_passes and not processing_complete: print(f"\n--- ADVERTENCIA: Límite de {max_passes} pases alcanzado...") # --- FIN BUCLE ITERATIVO --- # --- Verificación Final (Ajustada para RAW_STL_CHUNK) --- print("\n--- Verificación Final de Instrucciones No Procesadas ---") unprocessed_count = 0 unprocessed_details = [] # Añadir RAW_STL_CHUNK a los tipos ignorados ignored_types = ['raw_scl_chunk', 'unsupported_lang', 'raw_stl_chunk'] # Añadido raw_stl_chunk for network in data.get("networks", []): network_id = network.get("id", "Unknown ID") network_title = network.get("title", f"Network {network_id}") network_lang = network.get("language", "LAD") # Obtener lenguaje # No verificar instrucciones dentro de redes STL, ya que no se procesan if network_lang == "STL": continue for instruction in network.get("logic", []): instr_uid = instruction.get("instruction_uid", "Unknown UID") instr_type = instruction.get("type", "Unknown Type") is_grouped = instruction.get("grouped", False) # Condición revisada para ignorar los chunks crudos if (not instr_type.endswith(SCL_SUFFIX) and "_error" not in instr_type and not is_grouped and instr_type.lower() not in ignored_types): # Verifica contra lista actualizada unprocessed_count += 1 unprocessed_details.append( f" - Red '{network_title}' (ID: {network_id}, Lang: {network_lang}), " f"Instrucción UID: {instr_uid}, Tipo: '{instr_type}'" ) if unprocessed_count > 0: print(f"ADVERTENCIA: Se encontraron {unprocessed_count} instrucciones (no STL) que parecen no haber sido procesadas:") for detail in unprocessed_details: print(detail) else: print("INFO: Todas las instrucciones relevantes (no STL) parecen haber sido procesadas o agrupadas.") # --- Guardar JSON Final --- output_filename = json_filepath.replace("_simplified.json", "_simplified_processed.json") print(f"\nGuardando JSON procesado en: {output_filename}") try: with open(output_filename, "w", encoding="utf-8") as f: json.dump(data, f, indent=4, ensure_ascii=False) print("Guardado completado.") except Exception as e: print(f"Error Crítico al guardar JSON procesado: {e}") traceback.print_exc() # --- Ejecución (sin cambios) --- if __name__ == "__main__": # Imports necesarios solo para la ejecución como script principal import argparse import os import sys # Configurar ArgumentParser para recibir la ruta del XML original obligatoria parser = argparse.ArgumentParser( description="Process simplified JSON (_simplified.json) to embed SCL logic (SymPy version). Expects original XML filepath as argument." ) parser.add_argument( "source_xml_filepath", # Argumento posicional obligatorio help="Path to the original source XML file (passed from x0_main.py, used to derive JSON input name).", ) args = parser.parse_args() # Parsea los argumentos de sys.argv source_xml_file = args.source_xml_filepath # Obtiene la ruta del XML original # Verificar si el archivo XML original existe (como referencia, útil para depuración) # No es estrictamente necesario para la lógica aquí, pero ayuda a confirmar if not os.path.exists(source_xml_file): print(f"Advertencia (x2): Archivo XML original no encontrado: '{source_xml_file}', pero se intentará encontrar el JSON correspondiente.") # No salir necesariamente, pero es bueno saberlo. # Derivar nombre del archivo JSON de entrada (_simplified.json) xml_filename_base = os.path.splitext(os.path.basename(source_xml_file))[0] # Asumir que el JSON simplificado está en el mismo directorio que el XML original input_dir = os.path.dirname(source_xml_file) # Directorio del XML original input_json_file = os.path.join(input_dir, f"{xml_filename_base}_simplified.json") # Determinar el nombre esperado del archivo JSON procesado de salida output_json_file = os.path.join(input_dir, f"{xml_filename_base}_simplified_processed.json") print(f"(x2) Procesando: '{os.path.relpath(input_json_file)}' -> '{os.path.relpath(output_json_file)}'") # Verificar si el archivo JSON de entrada (_simplified.json) EXISTE antes de procesar if not os.path.exists(input_json_file): print(f"Error Fatal (x2): El archivo de entrada JSON simplificado no existe: '{input_json_file}'") print(f"Asegúrate de que 'x1_to_json.py' se ejecutó correctamente para '{os.path.relpath(source_xml_file)}'.") sys.exit(1) # Salir si el archivo necesario no está else: # Llamar a la función principal de procesamiento del script # Asumiendo que tu función principal se llama process_json_to_scl(input_json_path) try: process_json_to_scl(input_json_file) except Exception as e: print(f"Error Crítico (x2) durante el procesamiento de '{input_json_file}': {e}") import traceback traceback.print_exc() sys.exit(1) # Salir con error si la función principal falla