Compare commits

...

5 Commits

Author SHA1 Message Date
Miguel 31b9cd9701 No changes 2025-04-21 00:24:58 +02:00
Miguel 6a06f32176 . 2025-04-20 19:42:59 +02:00
Miguel 60fea74ebf Use the parsing subdirectory for a cleaner implementation 2025-04-20 19:39:46 +02:00
Miguel 546705f8ca Works with UDTs and Tags 2025-04-20 17:57:48 +02:00
Miguel 0e68e32b8a Added FC parameters 2025-04-20 13:52:26 +02:00
45 changed files with 2850 additions and 7775 deletions

.gitignore

@@ -10,6 +10,8 @@ __pycache__/
.Python
XML Project/
XML Proyect/
ToUpload/
XML Project - Doc/
build/
develop-eggs/
dist/

@@ -0,0 +1,28 @@
# generators/generate_md_tag_table.py
# -*- coding: utf-8 -*-
def generate_tag_table_markdown(data):
"""Genera contenido Markdown para una tabla de tags."""
md_lines = []
table_name = data.get("block_name", "UnknownTagTable")
tags = data.get("tags", [])
md_lines.append(f"# Tag Table: {table_name}")
md_lines.append("")
if tags:
md_lines.append("| Name | Datatype | Address | Comment |")
md_lines.append("|---|---|---|---|")
for tag in tags:
name = tag.get("name", "N/A")
datatype = tag.get("datatype", "N/A")
address = tag.get("address", "N/A") or " "
comment_raw = tag.get("comment")
comment = comment_raw.replace('|', '\\|').replace('\n', ' ') if comment_raw else ""
md_lines.append(f"| `{name}` | `{datatype}` | `{address}` | {comment} |")
md_lines.append("")
else:
md_lines.append("No tags found in this table.")
md_lines.append("")
return md_lines
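
A minimal usage sketch for the function above; the sample dictionary is hypothetical and only mimics the shape the tag-table parser appears to produce:

# Hypothetical input; real data comes from the parsed tag-table JSON.
sample_table = {
    "block_name": "PLC_Tags",
    "tags": [
        {"name": "Motor_Run", "datatype": "Bool", "address": "%Q0.0", "comment": "Start|Stop command"},
        {"name": "Speed_SP", "datatype": "Real", "address": "%MD10", "comment": None},
    ],
}
print("\n".join(generate_tag_table_markdown(sample_table)))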

@@ -0,0 +1,46 @@
# generators/generate_md_udt.py
# -*- coding: utf-8 -*-
import re
from .generator_utils import format_scl_start_value # Import the required utility
def generate_markdown_member_rows(members, level=0):
"""Genera filas Markdown para miembros de UDT (recursivo)."""
md_rows = []; prefix = "    " * level
for member in members:
name = member.get("name", "N/A"); datatype = member.get("datatype", "N/A")
start_value_raw = member.get("start_value")
start_value_fmt = format_scl_start_value(start_value_raw, datatype) if start_value_raw is not None else ""
comment_raw = member.get("comment"); comment = comment_raw.replace('|', '\\|').replace('\n', ' ') if comment_raw else ""
md_rows.append(f"| {prefix}`{name}` | `{datatype}` | `{start_value_fmt}` | {comment} |")
children = member.get("children")
if children: md_rows.extend(generate_markdown_member_rows(children, level + 1))
array_elements = member.get("array_elements")
if array_elements:
base_type_for_init = datatype
if isinstance(datatype, str) and datatype.lower().startswith("array["):
match = re.match(r"(Array\[.*\]\s+of\s+)(.*)", datatype, re.IGNORECASE)
if match: base_type_for_init = match.group(2).strip()
md_rows.append(f"| {prefix}  *(Initial Values)* | | | |")
try:
indices_numeric = {int(k): v for k, v in array_elements.items()}
sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())]
except ValueError: sorted_indices_str = sorted(array_elements.keys())
for idx_str in sorted_indices_str:
val_raw = array_elements[idx_str]
val_fmt = format_scl_start_value(val_raw, base_type_for_init) if val_raw is not None else ""
md_rows.append(f"| {prefix}  `[{idx_str}]` | | `{val_fmt}` | |")
return md_rows
def generate_udt_markdown(data):
"""Genera contenido Markdown para un UDT."""
md_lines = []; udt_name = data.get("block_name", "UnknownUDT"); udt_comment = data.get("block_comment", "")
md_lines.append(f"# UDT: {udt_name}"); md_lines.append("")
if udt_comment: md_lines.append(f"**Comment:**"); [md_lines.append(f"> {line}") for line in udt_comment.splitlines()]; md_lines.append("")
members = data.get("interface", {}).get("None", [])
if members:
md_lines.append("## Members"); md_lines.append("")
md_lines.append("| Name | Datatype | Start Value | Comment |"); md_lines.append("|---|---|---|---|")
md_lines.extend(generate_markdown_member_rows(members))
md_lines.append("")
else: md_lines.append("No members found in the UDT interface."); md_lines.append("")
return md_lines
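
A similar sketch for the UDT generator; the nested "interface"/"None" layout mirrors what generate_udt_markdown reads above, and the values are invented for illustration:

# Hypothetical UDT data shaped like the structure generate_udt_markdown expects.
udt_data = {
    "block_name": "UDT_Motor",
    "block_comment": "Motor control structure",
    "interface": {"None": [
        {"name": "Speed_SP", "datatype": "Real", "start_value": "0.0", "comment": "Setpoint"},
        {"name": "Running", "datatype": "Bool", "start_value": "false", "comment": None},
    ]},
}
print("\n".join(generate_udt_markdown(udt_data)))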

@@ -0,0 +1,144 @@
# ToUpload/generators/generate_scl_code_block.py
# -*- coding: utf-8 -*-
import re
import os
from .generator_utils import format_variable_name, generate_scl_declarations
SCL_SUFFIX = "_sympy_processed"
# ... (_generate_scl_header unchanged) ...
def _generate_scl_header(data, scl_block_name):
scl_output = []
block_type = data.get("block_type", "Unknown")
block_name = data.get("block_name", "UnknownBlock")
block_number = data.get("block_number")
block_comment = data.get("block_comment", "")
scl_block_keyword = "FUNCTION_BLOCK"
if block_type == "FC": scl_block_keyword = "FUNCTION"
elif block_type == "OB": scl_block_keyword = "ORGANIZATION_BLOCK"
scl_output.append(f"// Block Type: {block_type}")
if block_name != scl_block_name: scl_output.append(f"// Block Name (Original): {block_name}")
if block_number: scl_output.append(f"// Block Number: {block_number}")
original_net_langs = set(n.get("language", "Unknown") for n in data.get("networks", []))
scl_output.append(f"// Original Network Languages: {', '.join(l for l in original_net_langs if l != 'Unknown')}")
if block_comment: scl_output.append(f"// Block Comment:"); [scl_output.append(f"// {line}") for line in block_comment.splitlines()]
scl_output.append("")
if block_type == "FC":
return_type = "Void"; interface_data = data.get("interface", {})
if interface_data.get("Return"):
return_member = interface_data["Return"][0]; return_type_raw = return_member.get("datatype", "Void")
return_type = (return_type_raw[1:-1] if isinstance(return_type_raw, str) and return_type_raw.startswith('"') and return_type_raw.endswith('"') else return_type_raw)
if return_type != return_type_raw and not (isinstance(return_type_raw, str) and return_type_raw.lower().startswith("array")): return_type = f'"{return_type}"'
else: return_type = return_type_raw
scl_output.append(f'{scl_block_keyword} "{scl_block_name}" : {return_type}')
else: scl_output.append(f'{scl_block_keyword} "{scl_block_name}"')
scl_output.append("{ S7_Optimized_Access := 'TRUE' }"); scl_output.append("VERSION : 0.1"); scl_output.append("")
return scl_output
# _generate_scl_interface now receives project_root_dir so it can be forwarded
def _generate_scl_interface(interface_data, project_root_dir): # <-- new argument
"""Generates the VAR_* interface sections of the SCL for an FC/FB/OB."""
scl_output = []
section_order = ["Input", "Output", "InOut", "Static", "Temp", "Constant", "Return"] # Include Return
declared_temps = set() # For _generate_scl_temp_vars
for section_name in section_order:
vars_in_section = interface_data.get(section_name, [])
if vars_in_section:
scl_section_keyword = f"VAR_{section_name.upper()}"
end_keyword = "END_VAR"
if section_name == "Static": scl_section_keyword = "VAR_STAT"
if section_name == "Temp": scl_section_keyword = "VAR_TEMP"
if section_name == "Constant": scl_section_keyword = "CONSTANT"; end_keyword = "END_CONSTANT"
if section_name == "Return": scl_section_keyword = "VAR_OUTPUT"; # Retorno va en Output para FB/OB, implícito en FC
# Para FC, la sección Return no se declara explícitamente aquí
if interface_data.get("parent_block_type") == "FC" and section_name == "Return":
continue
scl_output.append(scl_section_keyword)
# Pass project_root_dir down to generate_scl_declarations
scl_output.extend(generate_scl_declarations(vars_in_section, indent_level=1, project_root_dir=project_root_dir)) # <-- pass project root
scl_output.append(end_keyword)
scl_output.append("")
if section_name == "Temp":
declared_temps.update(format_variable_name(v.get("name")) for v in vars_in_section if v.get("name"))
return scl_output, declared_temps
# ... (_generate_scl_temp_vars and _generate_scl_body unchanged) ...
def _generate_scl_temp_vars(data, declared_temps):
scl_output = []; temp_vars_detected = set(); temp_pattern = re.compile(r'"?(#\w+)"?')
for network in data.get("networks", []):
for instruction in network.get("logic", []):
scl_code = instruction.get("scl", ""); edge_update_code = instruction.get("_edge_mem_update_scl", "")
code_to_scan = (scl_code if scl_code else "") + "\n" + (edge_update_code if edge_update_code else "")
if code_to_scan: found_temps = temp_pattern.findall(code_to_scan); [temp_vars_detected.add(t) for t in found_temps if t]
additional_temps = sorted(list(temp_vars_detected - declared_temps))
if additional_temps:
print(f"INFO: Detectadas {len(additional_temps)} VAR_TEMP adicionales."); temp_section_exists = any("VAR_TEMP" in s for s in data.get("generated_scl", [])) # Check if VAR_TEMP already exists
if not temp_section_exists and not declared_temps: scl_output.append("VAR_TEMP") # Only add if no temps were declared before
for temp_name in additional_temps: scl_name = format_variable_name(temp_name); inferred_type = "Bool"; scl_output.append(f" {scl_name} : {inferred_type}; // Auto-generated temporary")
if not temp_section_exists and not declared_temps: scl_output.append("END_VAR"); scl_output.append("")
return scl_output
def _generate_scl_body(networks):
scl_output = ["BEGIN", ""]; network_logic_added = False
for i, network in enumerate(networks):
network_title = network.get("title", f'Network {network.get("id", i+1)}'); network_comment = network.get("comment", ""); network_lang = network.get("language", "LAD")
scl_output.append(f" // Network {i+1}: {network_title} (Original Language: {network_lang})")
if network_comment: [scl_output.append(f" // {line}") for line in network_comment.splitlines()]
scl_output.append("")
network_has_code = False; logic_in_network = network.get("logic", [])
if not logic_in_network: scl_output.append(f" // Network {i+1} has no logic elements."); scl_output.append(""); continue
if network_lang == "STL":
if logic_in_network and logic_in_network[0].get("type") == "RAW_STL_CHUNK": network_has_code = True; raw_stl_code = logic_in_network[0].get("stl", "// ERROR: STL code missing"); scl_output.append(f" // --- BEGIN STL Network {i+1} ---"); [scl_output.append(f" // {stl_line}") for stl_line in raw_stl_code.splitlines()]; scl_output.append(f" // --- END STL Network {i+1} ---"); scl_output.append("")
else: scl_output.append(f" // ERROR: Contenido STL inesperado en Network {i+1}."); scl_output.append("")
else:
for instruction in logic_in_network:
instruction_type = instruction.get("type", ""); scl_code = instruction.get("scl", ""); is_grouped = instruction.get("grouped", False); edge_update_scl = instruction.get("_edge_mem_update_scl", "")
if is_grouped: continue
code_to_print = []
if scl_code: code_to_print.extend(scl_code.splitlines())
if edge_update_scl: code_to_print.extend(edge_update_scl.splitlines()) # Append edge update SCL
if code_to_print:
is_only_comment = all(line.strip().startswith("//") for line in code_to_print if line.strip())
is_if_block = any(line.strip().startswith("IF") for line in code_to_print)
if not is_only_comment or is_if_block or "_error" in instruction_type or instruction_type in ["UNSUPPORTED_LANG","UNSUPPORTED_CONTENT","PARSING_ERROR", "RAW_SCL_CHUNK"]: # Print RAW_SCL chunks too
network_has_code = True; [scl_output.append(f" {line}") for line in code_to_print]; scl_output.append("")
if not network_has_code and network_lang != "STL": scl_output.append(f" // Network {i+1} did not produce printable SCL code."); scl_output.append("")
if network_has_code: network_logic_added = True # Mark if any network had code
# Add a default comment if no logic was generated at all
if not network_logic_added: scl_output.append(" // No executable logic generated by script."); scl_output.append("")
return scl_output
# generate_scl_for_code_block now accepts and forwards project_root_dir
def generate_scl_for_code_block(data, project_root_dir): # <-- new argument
"""Generates the full SCL content for an FC/FB/OB."""
scl_output = []
block_type = data.get("block_type", "Unknown")
scl_block_name = format_variable_name(data.get("block_name", "UnknownBlock"))
scl_block_keyword = "FUNCTION_BLOCK" # Default for FB
if block_type == "FC": scl_block_keyword = "FUNCTION"
elif block_type == "OB": scl_block_keyword = "ORGANIZATION_BLOCK"
scl_output.extend(_generate_scl_header(data, scl_block_name))
interface_data = data.get("interface", {})
interface_data['parent_block_type'] = block_type # Helps _generate_scl_interface
# Pass project_root_dir to _generate_scl_interface
interface_lines, declared_temps = _generate_scl_interface(interface_data, project_root_dir) # <-- pass project root
scl_output.extend(interface_lines)
# Generate the additional VAR_TEMP declarations (does not need project_root_dir)
scl_output.extend(_generate_scl_temp_vars(data, declared_temps))
# Generate the body (does not need project_root_dir)
scl_output.extend(_generate_scl_body(data.get("networks", [])))
scl_output.append(f"END_{scl_block_keyword}")
# Store the generated SCL in data for _generate_scl_temp_vars
data["generated_scl"] = scl_output
return scl_output
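
A driver sketch for generate_scl_for_code_block; the file names and the "." project root below are assumptions, since the pipeline entry point is not part of this diff:

import json

# Hypothetical paths; the real caller passes the project root used for UDT lookups.
with open("FC_Example_processed.json", "r", encoding="utf-8") as f:
    block_data = json.load(f)
scl_lines = generate_scl_for_code_block(block_data, project_root_dir=".")
with open("FC_Example.scl", "w", encoding="utf-8") as f:
    f.write("\n".join(scl_lines))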

@@ -0,0 +1,54 @@
# ToUpload/generators/generate_scl_db.py
# -*- coding: utf-8 -*-
# json/os do not need to be imported here; generate_scl_declarations handles that
from .generator_utils import format_variable_name, generate_scl_declarations
# _generate_scl_header could be adjusted if needed, but it looks fine
def _generate_scl_header(data, scl_block_name):
# ... (code unchanged) ...
scl_output = []
block_type = data.get("block_type", "Unknown")
block_name = data.get("block_name", "UnknownBlock")
block_number = data.get("block_number")
block_comment = data.get("block_comment", "")
scl_output.append(f"// Block Type: {block_type}")
if block_name != scl_block_name: scl_output.append(f"// Block Name (Original): {block_name}")
if block_number: scl_output.append(f"// Block Number: {block_number}")
if block_comment: scl_output.append(f"// Block Comment:"); [scl_output.append(f"// {line}") for line in block_comment.splitlines()]
scl_output.append(""); scl_output.append(f'DATA_BLOCK "{scl_block_name}"'); scl_output.append("{ S7_Optimized_Access := 'TRUE' }")
scl_output.append("VERSION : 0.1"); scl_output.append("")
return scl_output
# _generate_scl_interface now receives project_root_dir so it can be forwarded
def _generate_scl_interface(interface_data, project_root_dir): # <-- new argument
"""Generates the VAR section for a DB (based on 'Static')."""
scl_output = []
static_vars = interface_data.get("Static", [])
if static_vars:
scl_output.append("VAR")
# Pass project_root_dir down to generate_scl_declarations
scl_output.extend(generate_scl_declarations(static_vars, indent_level=1, project_root_dir=project_root_dir)) # <-- pass project root
scl_output.append("END_VAR")
else:
print("Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB.")
scl_output.append("VAR\nEND_VAR") # Añadir vacío
scl_output.append("")
return scl_output
# generate_scl_for_db now accepts and forwards project_root_dir
def generate_scl_for_db(data, project_root_dir): # <-- new argument
"""Generates the full SCL content for a DATA_BLOCK."""
scl_output = []
scl_block_name = format_variable_name(data.get("block_name", "UnknownDB"))
scl_output.extend(_generate_scl_header(data, scl_block_name))
interface_data = data.get("interface", {})
# Pass project_root_dir to _generate_scl_interface
scl_output.extend(_generate_scl_interface(interface_data, project_root_dir)) # <-- pass project root
scl_output.append("BEGIN")
scl_output.append(" // Data Blocks have no executable code")
scl_output.append("END_DATA_BLOCK")
return scl_output
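
The same calling pattern applies to data blocks; the file names below are illustrative only:

import json

with open("DB_Example_processed.json", "r", encoding="utf-8") as f:  # hypothetical input
    db_data = json.load(f)
with open("DB_Example.scl", "w", encoding="utf-8") as f:
    f.write("\n".join(generate_scl_for_db(db_data, project_root_dir=".")))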

@@ -0,0 +1,189 @@
# ToUpload/generators/generator_utils.py
# -*- coding: utf-8 -*-
import re
import os
import json
# --- Import format_variable_name from processors ---
try:
from processors.processor_utils import format_variable_name
except ImportError:
print("Advertencia: No se pudo importar 'format_variable_name' desde processors.processor_utils.")
print("Usando una implementación local básica.")
def format_variable_name(name): # Fallback
if not name: return "_INVALID_NAME_"
if name.startswith('"') and name.endswith('"'): return name
prefix = "#" if name.startswith("#") else ""
if prefix: name = name[1:]
if name and name[0].isdigit(): name = "_" + name
name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
return prefix + name
# --- End of fallback ---
# --- format_scl_start_value (unchanged from the previous version) ---
def format_scl_start_value(value, datatype):
if value is None: return None
datatype_lower = datatype.lower() if isinstance(datatype, str) else ""
value_str = str(value)
is_complex_type = ('"' in datatype_lower or 'array' in datatype_lower or 'struct' in datatype_lower or datatype_lower not in ["bool", "int", "dint", "sint", "usint", "uint", "udint", "lint", "ulint", "byte", "word", "dword", "lword", "real", "lreal", "time", "ltime", "s5time", "date", "dt", "dtl", "tod", "string", "char", "wstring", "wchar", "variant"])
if is_complex_type:
if isinstance(value, dict): return "/* Array init TBD */"
if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str): return value_str
if value_str == '0': return '0'
if value_str.lower() == 'false': return 'FALSE'
if value_str == "''" or value_str == "": return "''"
# print(f"INFO: Start value '{value_str}' for complex type '{datatype}' skipped.")
return None
value_str_unquoted = value_str;
if value_str.startswith('"') and value_str.endswith('"') and len(value_str) > 1: value_str_unquoted = value_str[1:-1]
elif value_str.startswith("'") and value_str.endswith("'") and len(value_str) > 1: value_str_unquoted = value_str[1:-1]
if any(t in datatype_lower for t in ["int","byte","word","dint","dword","lint","lword","sint","usint","uint","udint","ulint"]):
try: return str(int(value_str_unquoted))
except ValueError: return value_str_unquoted if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str_unquoted) else None
elif "bool" in datatype_lower: val_low = value_str_unquoted.lower(); return "TRUE" if val_low == 'true' or val_low == '1' else ("FALSE" if val_low == 'false' or val_low == '0' else (value_str_unquoted if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str_unquoted) else "FALSE"))
elif "string" in datatype_lower or "char" in datatype_lower: escaped_value = value_str_unquoted.replace("'", "''"); prefix = "WSTRING#" if "wstring" in datatype_lower else ("WCHAR#" if "wchar" in datatype_lower else ""); return f"{prefix}'{escaped_value}'"
elif "real" in datatype_lower or "lreal" in datatype_lower:
try: f_val = float(value_str_unquoted); s_val = "{:.7g}".format(f_val); return s_val + (".0" if "." not in s_val and "e" not in s_val.lower() else "")
except ValueError: return value_str_unquoted if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str_unquoted) else None
elif "time" in datatype_lower:
prefix, val_to_use = "", value_str_unquoted
if val_to_use.upper().startswith("T#"): prefix, val_to_use = "T#", val_to_use[2:]
elif val_to_use.upper().startswith("LT#"): prefix, val_to_use = "LT#", val_to_use[3:]
elif val_to_use.upper().startswith("S5T#"): prefix, val_to_use = "S5T#", val_to_use[4:]
if re.match(r'^-?(\d+d_)?(\d+h_)?(\d+m_)?(\d+s_)?(\d+ms)?$', val_to_use, re.IGNORECASE): target_prefix = "S5T#" if "s5time" in datatype_lower else ("LT#" if "ltime" in datatype_lower else "T#"); return f"{target_prefix}{val_to_use}"
elif re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str_unquoted): return value_str_unquoted
else: return None
elif any(t in datatype_lower for t in ["date", "dtl", "dt", "tod", "time_of_day"]):
val_to_use = value_str_unquoted; prefix = ""
if val_to_use.upper().startswith("DTL#"): prefix, val_to_use = "DTL#", val_to_use[4:]
elif val_to_use.upper().startswith("D#"): prefix, val_to_use = "D#", val_to_use[2:]
elif val_to_use.upper().startswith("DT#"): prefix, val_to_use = "DT#", val_to_use[3:]
elif val_to_use.upper().startswith("TOD#"): prefix, val_to_use = "TOD#", val_to_use[4:]
target_prefix="DTL#" if "dtl" in datatype_lower or "date_and_time" in datatype_lower else ("DT#" if "dt" in datatype_lower else ("TOD#" if "tod" in datatype_lower or "time_of_day" in datatype_lower else "D#"))
if re.match(r'^\d{4}-\d{2}-\d{2}(-\d{2}:\d{2}:\d{2}(\.\d+)?)?$', val_to_use) or re.match(r'^\d{2}:\d{2}:\d{2}(\.\d+)?$', val_to_use): return f"{target_prefix}{val_to_use}"
elif re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str_unquoted): return value_str_unquoted
else: return None
else: return value_str if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', value_str) else None
# --- generate_scl_declarations (MODIFIED to filter the UDT definition lookup) ---
def generate_scl_declarations(variables, indent_level=1, project_root_dir=None):
scl_lines = []
indent = " " * indent_level
# Lista de tipos básicos simples (en minúsculas)
basic_types = {"bool", "int", "dint", "sint", "usint", "uint", "udint", "lint", "ulint",
"byte", "word", "dword", "lword", "real", "lreal", "time", "ltime",
"s5time", "date", "dt", "dtl", "tod", "char", "wchar", "variant", "timer", "counter"} # Añadidos Timer/Counter
# Patrones para tipos básicos parametrizados (ignorando mayúsculas/minúsculas)
string_pattern = re.compile(r"^(W?STRING)(\[\s*\d+\s*\])?$", re.IGNORECASE)
array_pattern = re.compile(r'^(Array\[.*\]\s+of\s+)(.*)', re.IGNORECASE)
for var in variables:
var_name_scl = format_variable_name(var.get("name"))
var_dtype_raw = var.get("datatype", "VARIANT")
var_comment = var.get("comment")
start_value = var.get("start_value")
children = var.get("children")
array_elements = var.get("array_elements")
declaration_dtype = var_dtype_raw
base_type_for_init = var_dtype_raw
is_array = False
is_potential_udt = False
udt_name = None
check_type_for_udt = None # El tipo base limpio a comprobar
if isinstance(var_dtype_raw, str):
# 1. Es Array?
array_match = array_pattern.match(var_dtype_raw)
if array_match:
is_array = True
array_prefix_for_decl = array_match.group(1)
base_type_raw = array_match.group(2).strip()
base_type_for_init = base_type_raw
check_type_for_udt = base_type_raw # Comprobar el tipo base
# Determinar declaración SCL para el array
if base_type_raw.startswith('"') and base_type_raw.endswith('"'):
declaration_dtype = f'{array_prefix_for_decl}{base_type_raw}'
check_type_for_udt = base_type_raw[1:-1] # Quitar comillas para comprobar UDT
elif base_type_raw.lower() not in basic_types and \
not string_pattern.match(base_type_raw) and \
base_type_raw.lower() != 'char' and base_type_raw.lower() != 'wchar':
# Asumir UDT si no es básico conocido y no es String[N]/Char
declaration_dtype = f'{array_prefix_for_decl}"{base_type_raw}"' # Poner comillas
else: # Es básico o String[N]/Char
declaration_dtype = f'{array_prefix_for_decl}{base_type_raw}'
else: # 2. No es Array
base_type_for_init = var_dtype_raw
check_type_for_udt = var_dtype_raw # Comprobar el tipo completo
if var_dtype_raw.startswith('"') and var_dtype_raw.endswith('"'):
declaration_dtype = var_dtype_raw
check_type_for_udt = var_dtype_raw[1:-1] # Quitar comillas para comprobar UDT
elif var_dtype_raw.lower() not in basic_types and \
not string_pattern.match(var_dtype_raw) and \
var_dtype_raw.lower() != 'char' and var_dtype_raw.lower() != 'wchar':
# Asumir UDT si no es básico conocido y no es String[N]/Char
declaration_dtype = f'"{var_dtype_raw}"' # Poner comillas
else: # Es básico o String[N]/Char
declaration_dtype = var_dtype_raw
check_type_for_udt = None # No necesita comprobación de UDT
# 3. Comprobación final si es potencial UDT (basado en check_type_for_udt)
if check_type_for_udt:
if check_type_for_udt.lower() not in basic_types and \
not string_pattern.match(check_type_for_udt) and \
check_type_for_udt.lower() != 'char' and check_type_for_udt.lower() != 'wchar':
# Solo si después de limpiar (quitar comillas, etc.) sigue sin ser básico/paramétrico
is_potential_udt = True
udt_name = check_type_for_udt # Nombre limpio del UDT
# --- Look up the UDT definition file ONLY when is_potential_udt ---
if is_potential_udt and udt_name and project_root_dir:
udt_scl_name = format_variable_name(udt_name)
expected_udt_path = os.path.join(project_root_dir, 'PLC data types', 'parsing', f'{udt_scl_name}_processed.json')
relative_udt_path = os.path.relpath(expected_udt_path, project_root_dir)
if os.path.exists(expected_udt_path):
print(f" INFO: UDT definition '{udt_name}' found at: '{relative_udt_path}'")
else:
print(f" WARNING: No definition found for UDT '{udt_name}'. Expected at: '{relative_udt_path}'")
# --- Build the declaration line (rest unchanged) ---
declaration_line = f"{indent}{var_name_scl} : {declaration_dtype}"
init_value_scl_part = ""
if children:
scl_lines.append(declaration_line); scl_lines.append(f"{indent}STRUCT")
scl_lines.extend(generate_scl_declarations(children, indent_level + 1, project_root_dir))
scl_lines.append(f"{indent}END_STRUCT;")
if var_comment: scl_lines[-1] += f" // {var_comment}"
scl_lines.append("")
continue
init_value_scl = None
if is_array and array_elements:
try: indices_numeric = {int(k): v for k, v in array_elements.items()}; sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())]
except ValueError: print(f"Warning: non-numeric array indices for '{var_name_scl}', sorting as strings."); sorted_indices_str = sorted(array_elements.keys())
init_values = []
for idx_str in sorted_indices_str:
val_info = array_elements[idx_str]
val_raw = val_info.get('value') if isinstance(val_info, dict) else val_info
formatted_val = format_scl_start_value(val_raw, base_type_for_init)
init_values.append(formatted_val if formatted_val is not None else 'NULL')
if init_values: init_value_scl = f"[{', '.join(init_values)}]"
elif not is_array and not children and start_value is not None:
init_value_scl = format_scl_start_value(start_value, base_type_for_init)
if init_value_scl is not None:
init_value_scl_part = f" := {init_value_scl}"
declaration_line += f"{init_value_scl_part};"
if var_comment: declaration_line += f" // {var_comment}"
scl_lines.append(declaration_line)
return scl_lines
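
A few illustrative calls to format_scl_start_value, with the results the branches above should produce (shown as comments; not exhaustive):

format_scl_start_value("1", "Bool")        # -> "TRUE"
format_scl_start_value("2.5", "Real")      # -> "2.5"
format_scl_start_value("abc", "String")    # -> "'abc'"
format_scl_start_value("T#500ms", "Time")  # -> "T#500ms"
format_scl_start_value(None, "Int")        # -> None (no initializer emitted)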

@@ -0,0 +1,548 @@
# ToUpload/parsers/parse_lad_fbd.py
# -*- coding: utf-8 -*-
from lxml import etree
from collections import defaultdict
import copy
import traceback
# Importar desde las utilidades del parser
from .parser_utils import (
ns,
parse_access,
parse_part,
parse_call,
get_multilingual_text,
)
# Suffix used by x2 to mark processed instructions (useful for EN/ENO handling)
SCL_SUFFIX = "_sympy_processed" # We assume this is the suffix used by x2
def parse_lad_fbd_network(network_element):
"""
Parses a LAD/FBD/GRAPH network, extracts its logic, and adds the implicit EN/ENO connections.
Returns a dictionary representing the network for the JSON output.
"""
if network_element is None:
return {
"id": "ERROR",
"title": "Invalid Network Element",
"logic": [],
"error": "Input element was None",
}
network_id = network_element.get("ID")
# Usar get_multilingual_text de utils
title_element = network_element.xpath(
".//iface:MultilingualText[@CompositionName='Title']", namespaces=ns
)
network_title = (
get_multilingual_text(title_element[0])
if title_element
else f"Network {network_id}"
)
comment_element = network_element.xpath(
"./ObjectList/MultilingualText[@CompositionName='Comment']", namespaces=ns
) # OJO: Path relativo a CompileUnit?
if not comment_element: # Intentar path alternativo si el anterior falla
comment_element = network_element.xpath(
".//MultilingualText[@CompositionName='Comment']", namespaces=ns
) # Más genérico dentro de la red
network_comment = (
get_multilingual_text(comment_element[0]) if comment_element else ""
)
# --- Determinar Lenguaje (ya que este parser maneja varios) ---
network_lang = "Unknown"
attr_list_net = network_element.xpath("./AttributeList")
if attr_list_net:
lang_node_net = attr_list_net[0].xpath("./ProgrammingLanguage/text()")
if lang_node_net:
network_lang = lang_node_net[0].strip()
# --- Buscar FlgNet ---
# Buscar NetworkSource y luego FlgNet (ambos usan namespace flg)
network_source_node = network_element.xpath(".//flg:NetworkSource", namespaces=ns)
flgnet = None
if network_source_node:
flgnet_list = network_source_node[0].xpath("./flg:FlgNet", namespaces=ns)
if flgnet_list:
flgnet = flgnet_list[0]
else: # Intentar buscar FlgNet directamente si no hay NetworkSource
flgnet_list = network_element.xpath(".//flg:FlgNet", namespaces=ns)
if flgnet_list:
flgnet = flgnet_list[0]
if flgnet is None:
return {
"id": network_id,
"title": network_title,
"comment": network_comment,
"language": network_lang,
"logic": [],
"error": "FlgNet not found inside NetworkSource or CompileUnit",
}
# 1. Parse Access, Parts, Calls (usan utils)
access_map = {}
# Corregir XPath para buscar Access dentro de FlgNet/Parts
for acc in flgnet.xpath(".//flg:Parts/flg:Access", namespaces=ns):
acc_info = parse_access(acc)
if acc_info and acc_info.get("uid") and "error" not in acc_info.get("type", ""):
access_map[acc_info["uid"]] = acc_info
elif acc_info:
print(
f"Advertencia: Ignorando Access inválido o con error UID={acc_info.get('uid')} en red {network_id}"
)
parts_and_calls_map = {}
# Corregir XPath para buscar Part y Call dentro de FlgNet/Parts
instruction_elements = flgnet.xpath(
".//flg:Parts/flg:Part | .//flg:Parts/flg:Call", namespaces=ns
)
for element in instruction_elements:
parsed_info = None
tag_name = etree.QName(element.tag).localname
if tag_name == "Part":
parsed_info = parse_part(element) # Usa utils
elif tag_name == "Call":
parsed_info = parse_call(element) # Usa utils
if (
parsed_info
and parsed_info.get("uid")
and "error" not in parsed_info.get("type", "")
):
parts_and_calls_map[parsed_info["uid"]] = parsed_info
elif parsed_info:
# Si parse_call/parse_part devolvió error, lo guardamos para tener el UID
print(
f"Advertencia: {tag_name} con error UID={parsed_info.get('uid')} en red {network_id}. Error: {parsed_info.get('error')}"
)
parts_and_calls_map[parsed_info["uid"]] = (
parsed_info # Guardar aunque tenga error
)
# 2. Parse Wires (lógica compleja, mantener aquí)
wire_connections = defaultdict(list) # destination -> [source1, source2]
source_connections = defaultdict(list) # source -> [dest1, dest2]
eno_outputs = defaultdict(list)
qname_powerrail = etree.QName(ns["flg"], "Powerrail")
qname_identcon = etree.QName(
ns["flg"], "IdentCon"
) # Conexión a/desde Access (variable/constante)
qname_namecon = etree.QName(
ns["flg"], "NameCon"
) # Conexión a/desde Part/Call (pin con nombre)
qname_openbranch = etree.QName(
ns["flg"], "Openbranch"
) # Rama abierta (normalmente ignorada o tratada como TRUE?)
qname_opencon = etree.QName(
ns["flg"], "OpenCon"
) # Conexión abierta (pin no conectado)
# Corregir XPath para buscar Wire dentro de FlgNet/Wires
for wire in flgnet.xpath(".//flg:Wires/flg:Wire", namespaces=ns):
children = wire.getchildren()
if len(children) < 2:
continue # Necesita al menos origen y destino
source_elem = children[0]
source_uid, source_pin = None, None
# Determinar origen
if source_elem.tag == qname_powerrail:
source_uid, source_pin = "POWERRAIL", "out"
elif source_elem.tag == qname_identcon: # Origen es una variable/constante
source_uid = source_elem.get("UId")
source_pin = "value" # Salida implícita de un Access
elif source_elem.tag == qname_namecon: # Origen es pin de instrucción
source_uid = source_elem.get("UId")
source_pin = source_elem.get("Name")
elif source_elem.tag == qname_openbranch:
# ¿Cómo manejar OpenBranch como fuente? Podría ser TRUE o una condición OR implícita
source_uid = "OPENBRANCH_" + wire.get(
"UId", "Unknown"
) # UID único para la rama
source_pin = "out"
print(
f"Advertencia: OpenBranch encontrado como fuente en Wire UID={wire.get('UId')} (Red {network_id}). Tratando como fuente especial."
)
# No lo añadimos a parts_and_calls_map, get_sympy_representation necesitará manejarlo
# Ignorar OpenCon como fuente (no tiene sentido)
if source_uid is None or source_pin is None:
# print(f"Advertencia: Fuente de wire inválida o no soportada: {source_elem.tag} en Wire UID={wire.get('UId')}")
continue
source_info = (source_uid, source_pin)
# Procesar destinos
for dest_elem in children[1:]:
dest_uid, dest_pin = None, None
if (
dest_elem.tag == qname_identcon
): # Destino es una variable/constante (asignación)
dest_uid = dest_elem.get("UId")
dest_pin = "value" # Entrada implícita de un Access
elif dest_elem.tag == qname_namecon: # Destino es pin de instrucción
dest_uid = dest_elem.get("UId")
dest_pin = dest_elem.get("Name")
# Ignorar Powerrail, OpenBranch, OpenCon como destinos válidos de conexión lógica principal
if dest_uid is not None and dest_pin is not None:
dest_key = (dest_uid, dest_pin)
if source_info not in wire_connections[dest_key]:
wire_connections[dest_key].append(source_info)
# Mapa inverso: source -> list of destinations
source_key = (source_uid, source_pin)
dest_info = (dest_uid, dest_pin)
if dest_info not in source_connections[source_key]:
source_connections[source_key].append(dest_info)
# Trackear salidas ENO específicamente si la fuente es una instrucción
if source_pin == "eno" and source_uid in parts_and_calls_map:
if dest_info not in eno_outputs[source_uid]:
eno_outputs[source_uid].append(dest_info)
# 3. Build Initial Logic Structure (incorporando errores)
all_logic_steps = {}
# Lista de tipos funcionales (usados para inferencia EN)
# Estos son los tipos *originales* de las instrucciones
functional_block_types = [
"Move",
"Add",
"Sub",
"Mul",
"Div",
"Mod",
"Convert",
"Call", # Call ya está aquí
"TON",
"TOF",
"TP",
"CTU",
"CTD",
"CTUD",
"BLKMOV", # Añadidos
"Se",
"Sd", # Estos son tipos LAD que se mapearán a timers SCL
]
# Lista de generadores RLO (usados para inferencia EN)
rlo_generators = [
"Contact",
"O",
"Eq",
"Ne",
"Gt",
"Lt",
"Ge",
"Le",
"And",
"Xor",
"PBox",
"NBox",
"Not",
]
# Iterar sobre UIDs válidos (los que se pudieron parsear, aunque sea con error)
valid_instruction_uids = list(parts_and_calls_map.keys())
for instruction_uid in valid_instruction_uids:
instruction_info = parts_and_calls_map[instruction_uid]
# Hacer copia profunda para no modificar el mapa original
instruction_repr = copy.deepcopy(instruction_info)
instruction_repr["instruction_uid"] = instruction_uid # Asegurar UID
instruction_repr["inputs"] = {}
instruction_repr["outputs"] = {}
# Si la instrucción ya tuvo un error de parseo, añadirlo aquí
if "error" in instruction_info:
instruction_repr["parsing_error"] = instruction_info["error"]
# No intentar poblar inputs/outputs si el parseo base falló
all_logic_steps[instruction_uid] = instruction_repr
continue
original_type = instruction_repr.get("type", "") # Tipo de la instrucción
# --- Poblar Entradas ---
# Lista base de pines posibles (podría obtenerse de XSDs o dinámicamente)
possible_input_pins = set(["en", "in", "in1", "in2", "pre"])
# Añadir pines dinámicamente basados en el tipo de instrucción
if original_type in ["Contact", "Coil", "SCoil", "RCoil", "SdCoil"]:
possible_input_pins.add("operand")
elif original_type in [
"Add",
"Sub",
"Mul",
"Div",
"Mod",
"Eq",
"Ne",
"Gt",
"Lt",
"Ge",
"Le",
]:
possible_input_pins.update(["in1", "in2"])
elif original_type in ["TON", "TOF", "TP"]:
possible_input_pins.update(["IN", "PT"]) # Pines SCL
elif original_type in ["Se", "Sd"]:
possible_input_pins.update(["s", "tv", "timer"]) # Pines LAD
elif original_type in ["CTU", "CTD", "CTUD"]:
possible_input_pins.update(["CU", "CD", "R", "LD", "PV"]) # Pines SCL/LAD
elif original_type in ["PBox", "NBox"]:
possible_input_pins.update(
["bit", "clk", "in"]
) # PBox/NBox usa 'in' y 'bit'
elif original_type == "BLKMOV":
possible_input_pins.add("SRCBLK")
elif original_type == "Move":
possible_input_pins.add("in")
elif original_type == "Convert":
possible_input_pins.add("in")
elif original_type == "Call":
# Para Calls, los nombres de los parámetros reales se definen en el XML
# El Xpath busca Parameter DENTRO de CallInfo, que está DENTRO de Call
call_xml_element_list = flgnet.xpath(
f".//flg:Parts/flg:Call[@UId='{instruction_uid}']", namespaces=ns
)
if call_xml_element_list:
call_xml_element = call_xml_element_list[0]
call_info_node_list = call_xml_element.xpath(
"./flg:CallInfo", namespaces=ns
)
if call_info_node_list:
call_param_names = call_info_node_list[0].xpath(
"./flg:Parameter/@Name", namespaces=ns
)
possible_input_pins.update(call_param_names)
# print(f"DEBUG Call UID={instruction_uid}: Params={call_param_names}")
else: # Fallback si no hay namespace (menos probable)
call_info_node_list_no_ns = call_xml_element.xpath("./CallInfo")
if call_info_node_list_no_ns:
possible_input_pins.update(
call_info_node_list_no_ns[0].xpath("./Parameter/@Name")
)
# Iterar sobre pines posibles y buscar conexiones
for pin_name in possible_input_pins:
dest_key = (instruction_uid, pin_name)
if dest_key in wire_connections:
sources_list = wire_connections[dest_key]
input_sources_repr = []
for source_uid, source_pin in sources_list:
source_repr = None
if source_uid == "POWERRAIL":
source_repr = {"type": "powerrail"}
elif source_uid.startswith("OPENBRANCH_"):
source_repr = {
"type": "openbranch",
"uid": source_uid,
} # Fuente especial
elif source_uid in access_map:
source_repr = copy.deepcopy(access_map[source_uid])
elif source_uid in parts_and_calls_map:
source_instr_info = parts_and_calls_map[source_uid]
source_repr = {
"type": "connection",
"source_instruction_type": source_instr_info.get(
"type", "Unknown"
), # Usar tipo base
"source_instruction_uid": source_uid,
"source_pin": source_pin,
}
else:
# Fuente desconocida (ni Access, ni Part/Call válido)
print(
f"Advertencia: Fuente desconocida UID={source_uid} conectada a {instruction_uid}.{pin_name}"
)
source_repr = {"type": "unknown_source", "uid": source_uid}
input_sources_repr.append(source_repr)
# Guardar la representación de la entrada (lista o dict)
instruction_repr["inputs"][pin_name] = (
input_sources_repr[0]
if len(input_sources_repr) == 1
else input_sources_repr
)
# --- Poblar Salidas (simplificado: solo conexiones a Access) ---
possible_output_pins = set(
[
"out",
"out1",
"Q",
"q",
"eno",
"RET_VAL",
"DSTBLK",
"rt",
"cv",
"QU",
"QD",
"ET", # Añadir pines de salida estándar SCL
]
)
if original_type == "BLKMOV":
possible_output_pins.add("DSTBLK")
if (
original_type == "Call"
): # Para Calls, las salidas dependen del bloque llamado
call_xml_element_list = flgnet.xpath(
f".//flg:Parts/flg:Call[@UId='{instruction_uid}']", namespaces=ns
)
if call_xml_element_list:
call_info_node_list = call_xml_element_list[0].xpath(
"./flg:CallInfo", namespaces=ns
)
if call_info_node_list:
# Buscar parámetros con Section="Output" o "InOut" o "Return"
output_param_names = call_info_node_list[0].xpath(
"./flg:Parameter[@Section='Output' or @Section='InOut' or @Section='Return']/@Name",
namespaces=ns,
)
possible_output_pins.update(output_param_names)
for pin_name in possible_output_pins:
source_key = (instruction_uid, pin_name)
if source_key in source_connections:
if pin_name not in instruction_repr["outputs"]:
instruction_repr["outputs"][pin_name] = []
for dest_uid, dest_pin in source_connections[source_key]:
if (
dest_uid in access_map
): # Solo registrar si va a una variable/constante
dest_operand_copy = copy.deepcopy(access_map[dest_uid])
if (
dest_operand_copy
not in instruction_repr["outputs"][pin_name]
):
instruction_repr["outputs"][pin_name].append(
dest_operand_copy
)
all_logic_steps[instruction_uid] = instruction_repr
# 4. Inferencia EN (modificado para usar tipos originales)
processed_blocks_en_inference = set()
try:
# Ordenar UIDs numéricamente si es posible
sorted_uids_for_en = sorted(
all_logic_steps.keys(),
key=lambda x: (
int(x) if isinstance(x, str) and x.isdigit() else float("inf")
),
)
except ValueError:
sorted_uids_for_en = sorted(all_logic_steps.keys()) # Fallback sort
ordered_logic_list_for_en = [
all_logic_steps[uid] for uid in sorted_uids_for_en if uid in all_logic_steps
]
for i, instruction in enumerate(ordered_logic_list_for_en):
part_uid = instruction["instruction_uid"]
# Usar el tipo original para la lógica de inferencia
part_type_original = (
instruction.get("type", "").replace(SCL_SUFFIX, "").replace("_error", "")
)
# Inferencia solo para tipos funcionales que no tengan EN explícito
if (
part_type_original in functional_block_types
and "en" not in instruction.get("inputs", {})
and part_uid not in processed_blocks_en_inference
and "error" not in part_type_original
): # No inferir para errores
inferred_en_source = None
# Buscar hacia atrás en la lista ordenada
if i > 0:
for j in range(i - 1, -1, -1):
prev_instr = ordered_logic_list_for_en[j]
if "error" in prev_instr.get("type", ""):
continue # Saltar errores previos
prev_uid = prev_instr["instruction_uid"]
prev_type_original = (
prev_instr.get("type", "")
.replace(SCL_SUFFIX, "")
.replace("_error", "")
)
if prev_type_original in rlo_generators: # Fuente RLO encontrada
inferred_en_source = {
"type": "connection",
"source_instruction_uid": prev_uid,
"source_instruction_type": prev_type_original, # Tipo original
"source_pin": "out",
}
break # Detener búsqueda
elif (
prev_type_original in functional_block_types
): # Bloque funcional previo
# Comprobar si este bloque tiene salida ENO conectada
if (prev_uid, "eno") in source_connections:
inferred_en_source = {
"type": "connection",
"source_instruction_uid": prev_uid,
"source_instruction_type": prev_type_original, # Tipo original
"source_pin": "eno",
}
# Si no tiene ENO conectado, el flujo RLO se detiene aquí
break # Detener búsqueda
elif prev_type_original in [
"Coil",
"SCoil",
"RCoil",
"SdCoil",
"SetCoil",
"ResetCoil",
]:
# Bobinas terminan el flujo RLO
break # Detener búsqueda
# Si no se encontró fuente, conectar a PowerRail
if inferred_en_source is None:
inferred_en_source = {"type": "powerrail"}
# Actualizar la instrucción EN el diccionario principal
if part_uid in all_logic_steps:
# Asegurar que inputs exista
if "inputs" not in all_logic_steps[part_uid]:
all_logic_steps[part_uid]["inputs"] = {}
all_logic_steps[part_uid]["inputs"]["en"] = inferred_en_source
processed_blocks_en_inference.add(part_uid)
# 5. Lógica ENO (añadir destinos ENO si existen)
for source_instr_uid, eno_destinations in eno_outputs.items():
if source_instr_uid in all_logic_steps and "error" not in all_logic_steps[
source_instr_uid
].get("type", ""):
all_logic_steps[source_instr_uid]["eno_destinations"] = eno_destinations
# 6. Ordenar y Devolver
final_logic_list = [
all_logic_steps[uid] for uid in sorted_uids_for_en if uid in all_logic_steps
]
return {
"id": network_id,
"title": network_title,
"comment": network_comment,
"language": network_lang, # Lenguaje original de la red
"logic": final_logic_list,
# No añadir 'error' aquí a menos que el parseo completo falle
}
# --- Función de Información del Parser ---
def get_parser_info():
"""Devuelve la información para este parser."""
# Este parser maneja LAD, FBD y GRAPH
return {
"language": ["LAD", "FBD", "GRAPH"], # Lista de lenguajes soportados
"parser_func": parse_lad_fbd_network, # Función a llamar
}
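
A sketch of how a dispatcher could consume get_parser_info(); the XML loading and the CompileUnit XPath below are illustrative assumptions, not code from this changeset:

from lxml import etree

info = get_parser_info()                 # {"language": [...], "parser_func": ...}
tree = etree.parse("ExportedBlock.xml")  # hypothetical TIA Portal XML export
for unit in tree.xpath("//*[local-name()='SW.Blocks.CompileUnit']"):
    network = info["parser_func"](unit)
    print(network["id"], network["language"], len(network["logic"]))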

@@ -0,0 +1,253 @@
# ToUpload/parsers/parse_scl.py
# -*- coding: utf-8 -*-
from lxml import etree
import re
# Importar desde las utilidades del parser
from .parser_utils import ns, get_multilingual_text
def reconstruct_scl_from_tokens(st_node):
"""
Reconstructs SCL from <StructuredText>, improving the handling of
variables, literal constants, basic tokens, spaces and line breaks.
"""
if st_node is None:
return "// Error: StructuredText node not found.\n"
scl_parts = []
# Usar st:* para obtener todos los elementos hijos dentro del namespace st
children = st_node.xpath("./st:*", namespaces=ns)
for elem in children:
tag = etree.QName(elem.tag).localname
if tag == "Token":
scl_parts.append(elem.get("Text", ""))
elif tag == "Blank":
# Añadir espacios solo si es necesario o más de uno
num_spaces = int(elem.get("Num", 1))
if not scl_parts or not scl_parts[-1].endswith(" "):
scl_parts.append(" " * num_spaces)
elif num_spaces > 1:
scl_parts.append(" " * (num_spaces -1))
elif tag == "NewLine":
# Quitar espacios finales antes del salto de línea
if scl_parts:
scl_parts[-1] = scl_parts[-1].rstrip()
scl_parts.append("\n")
elif tag == "Access":
scope = elem.get("Scope")
access_str = f"/*_ERR_Scope_{scope}_*/" # Placeholder
# --- Variables ---
if scope in [
"GlobalVariable", "LocalVariable", "TempVariable", "InOutVariable",
"InputVariable", "OutputVariable", "ConstantVariable",
"GlobalConstant", "LocalConstant" # Añadir constantes simbólicas
]:
symbol_elem = elem.xpath("./st:Symbol", namespaces=ns)
if symbol_elem:
components = symbol_elem[0].xpath("./st:Component", namespaces=ns)
symbol_text_parts = []
for i, comp in enumerate(components):
name = comp.get("Name", "_ERR_COMP_")
if i > 0: symbol_text_parts.append(".")
# Check for HasQuotes attribute (adjust namespace if needed)
# El atributo está en el Component o en el Access padre? Probar ambos
has_quotes_comp = comp.get("HasQuotes", "false").lower() == "true" # Check directly on Component
has_quotes_access = False
access_parent = comp.xpath("ancestor::st:Access[1]", namespaces=ns) # Get immediate Access parent
if access_parent:
has_quotes_attr = access_parent[0].xpath("./st:BooleanAttribute[@Name='HasQuotes']/text()", namespaces=ns)
has_quotes_access = has_quotes_attr and has_quotes_attr[0].lower() == 'true'
has_quotes = has_quotes_comp or has_quotes_access
is_temp = name.startswith("#")
# Apply quotes based on HasQuotes or if it's the first component and not temp
if has_quotes or (i == 0 and not is_temp and '"' not in name): # Avoid double quotes
symbol_text_parts.append(f'"{name}"')
else:
symbol_text_parts.append(name)
# --- Array Index Access ---
index_access_nodes = comp.xpath("./st:Access", namespaces=ns)
if index_access_nodes:
# Llamada recursiva para cada índice
indices_text = [reconstruct_scl_from_tokens(idx_node) for idx_node in index_access_nodes]
# Limpiar saltos de línea dentro de los corchetes
indices_cleaned = [idx.replace('\n', '').strip() for idx in indices_text]
symbol_text_parts.append(f"[{','.join(indices_cleaned)}]")
access_str = "".join(symbol_text_parts)
else:
access_str = f"/*_ERR_NO_SYMBOL_IN_{scope}_*/"
# --- Constantes Literales ---
elif scope == "LiteralConstant":
constant_elem = elem.xpath("./st:Constant", namespaces=ns)
if constant_elem:
val_elem = constant_elem[0].xpath("./st:ConstantValue/text()", namespaces=ns)
type_elem = constant_elem[0].xpath("./st:ConstantType/text()", namespaces=ns)
const_type = type_elem[0].strip().lower() if type_elem and type_elem[0] is not None else ""
const_val = val_elem[0].strip() if val_elem and val_elem[0] is not None else "_ERR_CONSTVAL_"
# Formatear según tipo
if const_type == "bool": access_str = const_val.upper()
elif const_type.lower() == "string":
replaced_val = const_val.replace("'", "''")
access_str = f"'{replaced_val}'"
elif const_type.lower() == "char":
replaced_val = const_val.replace("'", "''")
access_str = f"'{replaced_val}'"
elif const_type == "wstring":
replaced_val = const_val.replace("'", "''")
access_str = f"WSTRING#'{replaced_val}'"
elif const_type == "wchar":
replaced_val = const_val.replace("'", "''")
access_str = f"WCHAR#'{replaced_val}'"
elif const_type == "time": access_str = f"T#{const_val}"
elif const_type == "ltime": access_str = f"LT#{const_val}"
elif const_type == "s5time": access_str = f"S5T#{const_val}"
elif const_type == "date": access_str = f"D#{const_val}"
elif const_type == "dtl": access_str = f"DTL#{const_val}"
elif const_type == "dt": access_str = f"DT#{const_val}"
elif const_type == "tod": access_str = f"TOD#{const_val}"
elif const_type in ["int", "dint", "sint", "usint", "uint", "udint", "real", "lreal", "word", "dword", "byte"]:
# Añadir .0 para reales si no tienen decimal
if const_type in ["real", "lreal"] and '.' not in const_val and 'e' not in const_val.lower():
access_str = f"{const_val}.0"
else:
access_str = const_val
else: # Otros tipos (LWORD, etc.) o desconocidos
access_str = const_val
else:
access_str = "/*_ERR_NOCONST_*/"
# --- Llamadas a Funciones/Bloques (Scope=Call) ---
elif scope == "Call":
call_info_node = elem.xpath("./st:CallInfo", namespaces=ns)
if call_info_node:
ci = call_info_node[0]
call_name = ci.get("Name", "_ERR_CALLNAME_")
call_type = ci.get("BlockType") # FB, FC, etc.
# Parámetros (están como Access o Token dentro de CallInfo/Parameter)
params = ci.xpath("./st:Parameter", namespaces=ns)
param_parts = []
for p in params:
p_name = p.get("Name", "_ERR_PARAMNAME_")
# El valor del parámetro está dentro del nodo Parameter
p_value_node = p.xpath("./st:Access | ./st:Token", namespaces=ns) # Buscar Access o Token
p_value_scl = ""
if p_value_node:
p_value_scl = reconstruct_scl_from_tokens(p) # Parsear el contenido del parámetro
p_value_scl = p_value_scl.replace('\n', '').strip() # Limpiar SCL resultante
param_parts.append(f"{p_name} := {p_value_scl}")
# Manejar FB vs FC
if call_type == "FB":
instance_node = ci.xpath("./st:Instance/st:Component/@Name", namespaces=ns)
if instance_node:
instance_name = f'"{instance_node[0]}"'
access_str = f"{instance_name}({', '.join(param_parts)})"
else: # FB sin instancia? Podría ser STAT
access_str = f'"{call_name}"({", ".join(param_parts)}) (* FB sin instancia explícita? *)'
elif call_type == "FC":
access_str = f'"{call_name}"({", ".join(param_parts)})'
else: # Otros tipos de llamada
access_str = f'"{call_name}"({", ".join(param_parts)}) (* Tipo: {call_type} *)'
else:
access_str = "/*_ERR_NO_CALLINFO_*/"
# Añadir más scopes si son necesarios (e.g., Address, Label, Reference)
scl_parts.append(access_str)
elif tag == "Comment" or tag == "LineComment":
# Usar get_multilingual_text del parser_utils
comment_text = get_multilingual_text(elem)
if tag == "Comment":
scl_parts.append(f"(* {comment_text} *)")
else:
scl_parts.append(f"// {comment_text}")
# Ignorar otros tipos de nodos si no son relevantes para el SCL
full_scl = "".join(scl_parts)
# --- Re-indentación Simple ---
output_lines = []
indent_level = 0
indent_str = " " # Dos espacios
for line in full_scl.splitlines():
trimmed_line = line.strip()
if not trimmed_line:
# Mantener líneas vacías? Opcional.
# output_lines.append("")
continue
# Reducir indentación ANTES de imprimir para END, ELSE, etc.
if trimmed_line.upper().startswith(("END_", "UNTIL", "}")) or \
trimmed_line.upper() in ["ELSE", "ELSIF"]:
indent_level = max(0, indent_level - 1)
output_lines.append(indent_str * indent_level + trimmed_line)
# Aumentar indentación DESPUÉS de imprimir para IF, FOR, etc.
# Ser más específico con las palabras clave que aumentan indentación
# Usar .upper() para ignorar mayúsculas/minúsculas
line_upper = trimmed_line.upper()
if line_upper.endswith(("THEN", "DO", "OF", "{")) or \
line_upper.startswith(("IF ", "FOR ", "WHILE ", "CASE ", "REPEAT", "STRUCT")) or \
line_upper == "ELSE":
# Excepción: No indentar después de ELSE IF
if not (line_upper == "ELSE" and "IF" in output_lines[-1].upper()):
indent_level += 1
return "\n".join(output_lines)
def parse_scl_network(network_element):
"""
Parses an SCL network by extracting the reconstructed source code.
Returns a dictionary representing the network for the JSON output.
"""
network_id = network_element.get("ID", "UnknownSCL_ID")
network_lang = "SCL" # Sabemos que es SCL
# Buscar NetworkSource y luego StructuredText
network_source_node = network_element.xpath(".//flg:NetworkSource", namespaces=ns)
structured_text_node = None
if network_source_node:
structured_text_node_list = network_source_node[0].xpath("./st:StructuredText", namespaces=ns)
if structured_text_node_list:
structured_text_node = structured_text_node_list[0]
reconstructed_scl = "// SCL extraction failed: StructuredText node not found.\n"
if structured_text_node is not None:
reconstructed_scl = reconstruct_scl_from_tokens(structured_text_node)
# Crear la estructura de datos para la red
parsed_network_data = {
"id": network_id,
"language": network_lang,
"logic": [ # SCL se guarda como un único bloque lógico
{
"instruction_uid": f"SCL_{network_id}", # UID sintético
"type": "RAW_SCL_CHUNK", # Tipo especial para SCL crudo
"scl": reconstructed_scl, # El código SCL reconstruido
}
],
# No añadimos error aquí, reconstruct_scl_from_tokens ya incluye comentarios de error
}
return parsed_network_data
# --- Función de Información del Parser ---
def get_parser_info():
"""Devuelve la información para este parser."""
return {
'language': ['SCL'], # Lista de lenguajes soportados
'parser_func': parse_scl_network # Función a llamar
}
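
A small sketch of a language-to-parser registry built from get_parser_info(), which is presumably how the loader selects a parser per network (the registry shape is an assumption):

info = get_parser_info()
registry = {lang: info["parser_func"] for lang in info["language"]}
# registry["SCL"] -> parse_scl_network; the LAD/FBD/GRAPH and STL parser modules expose the same interface.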

@@ -0,0 +1,278 @@
# ToUpload/parsers/parse_stl.py
# -*- coding: utf-8 -*-
from lxml import etree
# Importar desde las utilidades del parser
from .parser_utils import ns # Solo necesitamos los namespaces aquí
# --- STL Reconstruction Helper Functions (adapted from x1) ---
def get_access_text_stl(access_element):
"""Reconstruye una representación textual simple de un Access en STL."""
if access_element is None: return "_ERR_ACCESS_"
scope = access_element.get("Scope")
# Símbolo (Variable, Constante Simbólica)
symbol_elem = access_element.xpath("./stl:Symbol", namespaces=ns)
if symbol_elem:
components = symbol_elem[0].xpath("./stl:Component", namespaces=ns)
parts = []
for i, comp in enumerate(components):
name = comp.get("Name", "_ERR_COMP_")
# Comprobar HasQuotes (en Access padre?)
has_quotes_elem = comp.xpath("ancestor::stl:Access/stl:BooleanAttribute[@Name='HasQuotes']/text()", namespaces=ns)
has_quotes = has_quotes_elem and has_quotes_elem[0].lower() == "true"
is_temp = name.startswith("#")
if i > 0: parts.append(".")
# Aplicar comillas
if has_quotes or (i == 0 and not is_temp and '"' not in name):
parts.append(f'"{name}"')
else:
parts.append(name)
# Índices de Array
index_access = comp.xpath("./stl:Access", namespaces=ns)
if index_access:
indices = [get_access_text_stl(ia) for ia in index_access]
parts.append(f"[{','.join(indices)}]")
return "".join(parts)
# Constante Literal
constant_elem = access_element.xpath("./stl:Constant", namespaces=ns)
if constant_elem:
val_elem = constant_elem[0].xpath("./stl:ConstantValue/text()", namespaces=ns)
type_elem = constant_elem[0].xpath("./stl:ConstantType/text()", namespaces=ns)
const_type = (type_elem[0].strip().lower() if type_elem and type_elem[0] is not None else "")
const_val = (val_elem[0].strip() if val_elem and val_elem[0] is not None else "_ERR_CONST_")
# Añadir prefijos estándar STL
if const_type == "time": return f"T#{const_val}"
if const_type == "s5time": return f"S5T#{const_val}"
if const_type == "date": return f"D#{const_val}"
if const_type == "dt": return f"DT#{const_val}"
if const_type == "time_of_day" or const_type=="tod": return f"TOD#{const_val}"
if const_type.lower() == "string":
replaced_val = const_val.replace("'", "''")
access_str = f"'{replaced_val}'"
if const_type.lower() == "char":
replaced_val = const_val.replace("'", "''")
access_str = f"'{replaced_val}'"
if const_type == "wstring":
replaced_val = const_val.replace("'", "''")
access_str = f"WSTRING#'{replaced_val}'"
if const_type == "wchar":
replaced_val = const_val.replace("'", "''")
access_str = f"WCHAR#'{replaced_val}'" # Añadir más si es necesario (WSTRING#, BYTE#, WORD#...)
if const_type == "byte" and const_val.startswith("16#"): return f"B#{const_val}" # Formato B#16#FF
if const_type == "word" and const_val.startswith("16#"): return f"W#{const_val}"
if const_type == "dword" and const_val.startswith("16#"): return f"DW#{const_val}"
# Real con punto decimal
if const_type == "real" and '.' not in const_val and 'e' not in const_val.lower(): return f"{const_val}.0"
return const_val # Valor por defecto
# Etiqueta
label_elem = access_element.xpath("./stl:Label", namespaces=ns)
if label_elem:
return label_elem[0].get("Name", "_ERR_LABEL_")
# Acceso Indirecto (Punteros)
indirect_elem = access_element.xpath("./stl:Indirect", namespaces=ns)
if indirect_elem:
reg = indirect_elem[0].get("Register", "AR?") # AR1, AR2
offset_str = indirect_elem[0].get("BitOffset", "0")
area = indirect_elem[0].get("Area", "DB") # DB, DI, L, etc.
width = indirect_elem[0].get("Width", "X") # Bit, Byte, Word, Double
try:
bit_offset = int(offset_str)
byte_offset = bit_offset // 8
bit_in_byte = bit_offset % 8
p_format_offset = f"P#{byte_offset}.{bit_in_byte}"
except ValueError:
p_format_offset = "P#?.?"
width_map = {"Bit": "X", "Byte": "B", "Word": "W", "Double": "D", "Long": "D"}
width_char = width_map.get(width, width[0] if width else "?")
return f"{area}{width_char}[{reg},{p_format_offset}]"
# Dirección Absoluta (I, Q, M, PI, PQ, T, C, DBX, DIX, L)
address_elem = access_element.xpath("./stl:Address", namespaces=ns)
if address_elem:
area = address_elem[0].get("Area", "??") # Input, Output, Memory, DB, DI, Local, Timer, Counter...
bit_offset_str = address_elem[0].get("BitOffset", "0")
addr_type_str = address_elem[0].get("Type", "Bool") # Bool, Byte, Word, DWord, Int, DInt, Real...
try:
bit_offset = int(bit_offset_str)
byte_offset = bit_offset // 8
bit_in_byte = bit_offset % 8
# Determinar ancho (X, B, W, D)
addr_width = "X" # Default bit
if addr_type_str in ["Byte", "SInt", "USInt"]: addr_width = "B"
elif addr_type_str in ["Word", "Int", "UInt"]: addr_width = "W"
elif addr_type_str in ["DWord", "DInt", "UDInt", "Real", "Time", "DT", "TOD"]: addr_width = "D"
elif addr_type_str in ["LReal", "LTime", "LWord", "LInt", "ULInt"]: addr_width = "D" # L se maneja como D en direccionamiento base? O usar L? Chequear estándar. STL clásico no tenía L.
# Mapear Área XML a Área STL
area_map = {"Input": "I", "Output": "Q", "Memory": "M",
"PeripheryInput": "PI", "PeripheryOutput": "PQ",
"DB": "DB", "DI": "DI", "Local": "L",
"Timer": "T", "Counter": "C"}
stl_area = area_map.get(area, area)
if stl_area in ["DB", "DI"]:
block_num = address_elem[0].get("BlockNumber") # Para DB10.DBX0.0
if block_num:
return f"{stl_area}{block_num}.{stl_area}{addr_width}{byte_offset}.{bit_in_byte}"
else: # Para acceso con registro DB/DI (DBX, DIW, etc.)
return f"{stl_area}{addr_width}{byte_offset}.{bit_in_byte}"
elif stl_area in ["T", "C"]:
return f"{stl_area}{byte_offset}" # T 5, C 10 (offset es el número)
else: # I, Q, M, L, PI, PQ
return f"{stl_area}{addr_width}{byte_offset}.{bit_in_byte}" # M10.1, IW0, QB5, etc.
except ValueError:
return f"{area}?{bit_offset_str}?"
# CallInfo (para CALL FC10, CALL FB20, DB10)
call_info_elem = access_element.xpath("./stl:CallInfo", namespaces=ns)
if call_info_elem:
name = call_info_elem[0].get("Name", "_ERR_CALL_")
btype = call_info_elem[0].get("BlockType", "FC") # FC, FB, DB
instance_node = call_info_elem[0].xpath("./stl:Instance/stl:Component/@Name", namespaces=ns)
if btype == "FB" and instance_node:
# Para CALL FB, el operando es el DB de instancia
db_name_raw = instance_node[0]
return f'"{db_name_raw}"' if '"' not in db_name_raw else db_name_raw
elif btype == "DB":
return f'DB "{name}"' # O solo DB name? ej. DB10
else: # FC
return f'{btype} "{name}"' # FC "Nombre"
return f"_{scope}_?" # Fallback
def get_comment_text_stl(comment_element):
"""Extrae texto de un LineComment o Comment para STL."""
if comment_element is None: return ""
# STL Comments suelen tener <Text> directamente
text_nodes = comment_element.xpath("./stl:Text/text()", namespaces=ns)
if text_nodes:
return text_nodes[0].strip()
return "" # Vacío si no hay <Text>
def reconstruct_stl_from_statementlist(statement_list_node):
"""Reconstruye el código STL como una cadena de texto desde <StatementList>."""
if statement_list_node is None:
return "// Error: StatementList node not found.\n"
stl_lines = []
statements = statement_list_node.xpath("./stl:StlStatement", namespaces=ns)
for stmt in statements:
line_parts = []
inline_comment = "" # Comentarios en la misma línea
# 1. Comentarios iniciales (línea completa //)
initial_comments = stmt.xpath("child::stl:Comment[not(@Inserted='true')] | child::stl:LineComment[not(@Inserted='true')]", namespaces=ns)
for comm in initial_comments:
comment_text = get_comment_text_stl(comm)
if comment_text:
for comment_line in comment_text.splitlines():
stl_lines.append(f"// {comment_line}")
# 2. Etiqueta (Label)
label_decl = stmt.xpath("./stl:LabelDeclaration", namespaces=ns)
label_str = ""
if label_decl:
label_name = label_decl[0].xpath("./stl:Label/@Name", namespaces=ns)
if label_name:
label_str = f"{label_name[0]}:"
# Comentarios después de la etiqueta (inline)
label_comments = label_decl[0].xpath("./stl:Comment[@Inserted='true'] | ./stl:LineComment[@Inserted='true']", namespaces=ns)
for lcomm in label_comments:
inline_comment += f" // {get_comment_text_stl(lcomm)}"
if label_str:
line_parts.append(label_str)
# 3. Instrucción (StlToken)
instruction_token = stmt.xpath("./stl:StlToken", namespaces=ns)
instruction_str = ""
if instruction_token:
token_text = instruction_token[0].get("Text", "_ERR_TOKEN_")
if token_text == "EMPTY_LINE":
stl_lines.append("") # Línea vacía
continue # Saltar resto del statement
elif token_text == "COMMENT": # Marcador de línea de comentario completo
# Ya manejado por initial_comments? Verificar XML. Si no, extraer comentario aquí.
pass # Asumir manejado antes
else:
instruction_str = token_text
# Comentarios asociados al token (inline)
token_comments = instruction_token[0].xpath("./stl:Comment[@Inserted='true'] | ./stl:LineComment[@Inserted='true']", namespaces=ns)
for tcomm in token_comments:
inline_comment += f" // {get_comment_text_stl(tcomm)}"
if instruction_str:
# Añadir tabulación si hay etiqueta
line_parts.append("\t" + instruction_str if label_str else instruction_str)
# 4. Operando (Access)
access_elem = stmt.xpath("./stl:Access", namespaces=ns)
access_str = ""
if access_elem:
access_text = get_access_text_stl(access_elem[0])
access_str = access_text
# Comentarios dentro del Access (inline)
access_comments = access_elem[0].xpath("child::stl:Comment[@Inserted='true'] | child::stl:LineComment[@Inserted='true']", namespaces=ns)
for acc_comm in access_comments:
inline_comment += f" // {get_comment_text_stl(acc_comm)}"
if access_str:
line_parts.append(access_str)
# Construir línea final
current_line = " ".join(lp for lp in line_parts if lp) # Unir partes con espacio
if inline_comment:
current_line += f"\t{inline_comment.strip()}" # Añadir comentario con tab
if current_line.strip(): # Añadir solo si no está vacía después de todo
stl_lines.append(current_line.rstrip()) # Quitar espacios finales
return "\n".join(stl_lines)
def parse_stl_network(network_element):
"""
Parsea una red STL extrayendo el código fuente reconstruido.
Devuelve un diccionario representando la red para el JSON.
"""
network_id = network_element.get("ID", "UnknownSTL_ID")
network_lang = "STL"
# Buscar NetworkSource y luego StatementList
network_source_node = network_element.xpath(".//flg:NetworkSource", namespaces=ns)
statement_list_node = None
if network_source_node:
statement_list_node_list = network_source_node[0].xpath("./stl:StatementList", namespaces=ns)
if statement_list_node_list:
statement_list_node = statement_list_node_list[0]
reconstructed_stl = "// STL extraction failed: StatementList node not found.\n"
if statement_list_node is not None:
reconstructed_stl = reconstruct_stl_from_statementlist(statement_list_node)
# Crear la estructura de datos para la red
parsed_network_data = {
"id": network_id,
"language": network_lang,
"logic": [ # STL se guarda como un único bloque lógico
{
"instruction_uid": f"STL_{network_id}", # UID sintético
"type": "RAW_STL_CHUNK", # Tipo especial para STL crudo
"stl": reconstructed_stl, # El código STL reconstruido
}
],
}
return parsed_network_data
# --- Función de Información del Parser ---
def get_parser_info():
"""Devuelve la información para este parser."""
return {
'language': ['STL'], # Lenguaje soportado
'parser_func': parse_stl_network # Función a llamar
}
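A minimal usage sketch of this parser follows; only parse_stl_network, its return structure and the namespace URIs are taken from the code above, while the module path and the sample XML fragment are assumptions.

# Usage sketch (assumption: this file is importable as parsers.parse_stl inside ToUpload/)
from lxml import etree
from parsers.parse_stl import parse_stl_network  # module name assumed

FLG = "http://www.siemens.com/automation/Openness/SW/NetworkSource/FlgNet/v4"
STL = "http://www.siemens.com/automation/Openness/SW/NetworkSource/StatementList/v4"
sample = (
    f'<Network ID="7" xmlns:flg="{FLG}" xmlns:stl="{STL}">'
    '<flg:NetworkSource><stl:StatementList>'
    '<stl:StlStatement><stl:StlToken Text="SET"/></stl:StlStatement>'
    '</stl:StatementList></flg:NetworkSource>'
    '</Network>'
)
network = parse_stl_network(etree.fromstring(sample))
# Expected shape: {"id": "7", "language": "STL",
#                  "logic": [{"instruction_uid": "STL_7", "type": "RAW_STL_CHUNK", "stl": "SET"}]}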

View File

@ -0,0 +1,387 @@
# ToUpload/parsers/parser_utils.py
# -*- coding: utf-8 -*-
from lxml import etree
import traceback
# --- Namespaces (Común para muchos parsers) ---
ns = {
"iface": "http://www.siemens.com/automation/Openness/SW/Interface/v5",
"flg": "http://www.siemens.com/automation/Openness/SW/NetworkSource/FlgNet/v4",
"st": "http://www.siemens.com/automation/Openness/SW/NetworkSource/StructuredText/v3",
"stl": "http://www.siemens.com/automation/Openness/SW/NetworkSource/StatementList/v4",
}
# --- Funciones Comunes de Extracción de Texto y Nodos ---
def get_multilingual_text(element, default_lang="en-US", fallback_lang="it-IT"):
"""Extrae texto multilingüe de un elemento XML, asegurando devolver siempre string."""
if element is None:
return "" # Devolver cadena vacía si el elemento es None
try:
# Intenta buscar el idioma por defecto
xpath_expr_default = f".//iface:MultilingualTextItem[iface:AttributeList/iface:Culture='{default_lang}']/iface:AttributeList/iface:Text"
text_items_default = element.xpath(xpath_expr_default, namespaces=ns)
# CORRECCIÓN: Devolver "" si .text es None
if text_items_default and text_items_default[0].text is not None:
return text_items_default[0].text.strip()
# Intentar buscar el idioma de fallback
xpath_expr_fallback = f".//iface:MultilingualTextItem[iface:AttributeList/iface:Culture='{fallback_lang}']/iface:AttributeList/iface:Text"
text_items_fallback = element.xpath(xpath_expr_fallback, namespaces=ns)
# CORRECCIÓN: Devolver "" si .text es None
if text_items_fallback and text_items_fallback[0].text is not None:
return text_items_fallback[0].text.strip()
# Si no encuentra ninguno, toma el primer texto que encuentre
xpath_expr_any = ".//iface:MultilingualTextItem/iface:AttributeList/iface:Text"
text_items_any = element.xpath(xpath_expr_any, namespaces=ns)
# CORRECCIÓN: Devolver "" si .text es None
if text_items_any and text_items_any[0].text is not None:
return text_items_any[0].text.strip()
# Fallback final si no se encontró ningún MultilingualTextItem con texto
return "" # Asegurar retorno de string vacío
except Exception as e:
print(f"Advertencia: Error extrayendo MultilingualText: {e}")
# traceback.print_exc() # Descomentar para más detalles del error
return "" # Devolver cadena vacía en caso de excepción
def get_symbol_name(symbol_element):
"""Obtiene el nombre completo de un símbolo desde un elemento <flg:Symbol>."""
if symbol_element is None:
return None
try:
components = symbol_element.xpath("./flg:Component/@Name", namespaces=ns)
return (
".".join(
f'"{c}"' if not c.startswith("#") and '"' not in c else c
for c in components
)
if components
else None
)
except Exception as e:
print(f"Advertencia: Excepción en get_symbol_name: {e}")
return None
def parse_access(access_element):
"""Parsea un nodo <flg:Access> devolviendo un diccionario con su información."""
if access_element is None:
return None
uid = access_element.get("UId")
scope = access_element.get("Scope")
info = {"uid": uid, "scope": scope, "type": "unknown"}
symbol = access_element.xpath("./flg:Symbol", namespaces=ns)
constant = access_element.xpath("./flg:Constant", namespaces=ns)
if symbol:
info["type"] = "variable"
info["name"] = get_symbol_name(symbol[0])
if info["name"] is None:
info["type"] = "error_parsing_symbol"
print(f"Error: No se pudo parsear nombre símbolo Access UID={uid}")
raw_text = "".join(symbol[0].xpath(".//text()")).strip()
info["name"] = (
f'"_ERR_PARSING_{raw_text[:20]}"'
if raw_text
else f'"_ERR_PARSING_EMPTY_SYMBOL_ACCESS_{uid}"'
)
elif constant:
info["type"] = "constant"
const_type_elem = constant[0].xpath("./flg:ConstantType", namespaces=ns)
const_val_elem = constant[0].xpath("./flg:ConstantValue", namespaces=ns)
info["datatype"] = (
const_type_elem[0].text.strip()
if const_type_elem and const_type_elem[0].text is not None
else "Unknown"
)
value_str = (
const_val_elem[0].text.strip()
if const_val_elem and const_val_elem[0].text is not None
else None
)
if value_str is None:
info["type"] = "error_parsing_constant"
info["value"] = None
print(f"Error: Constante sin valor Access UID={uid}")
if info["datatype"] == "Unknown" and value_str:
val_lower = value_str.lower()
if val_lower in ["true", "false"]:
info["datatype"] = "Bool"
elif value_str.isdigit() or (
value_str.startswith("-") and value_str[1:].isdigit()
):
info["datatype"] = "Int"
elif "." in value_str:
try:
float(value_str)
info["datatype"] = "Real"
except ValueError:
pass
elif "#" in value_str:
parts = value_str.split("#", 1)
prefix = parts[0].upper()
if prefix == "T":
info["datatype"] = "Time"
elif prefix == "LT":
info["datatype"] = "LTime"
elif prefix == "S5T":
info["datatype"] = "S5Time"
elif prefix == "D":
info["datatype"] = "Date"
elif prefix == "DT":
info["datatype"] = "DT"
elif prefix == "DTL":
info["datatype"] = "DTL"
elif prefix == "TOD":
info["datatype"] = "Time_Of_Day"
elif value_str.startswith("'") and value_str.endswith("'"):
info["datatype"] = "String"
else:
info["datatype"] = "TypedConstant"
elif value_str.startswith("'") and value_str.endswith("'"):
info["datatype"] = "String"
info["value"] = value_str
dtype_lower = info["datatype"].lower()
val_str_processed = value_str
if isinstance(value_str, str):
if "#" in value_str:
val_str_processed = value_str.split("#", 1)[-1]
if (
val_str_processed.startswith("'")
and val_str_processed.endswith("'")
and len(val_str_processed) > 1
):
val_str_processed = val_str_processed[1:-1]
try:
if dtype_lower in [
"int",
"dint",
"udint",
"sint",
"usint",
"lint",
"ulint",
"word",
"dword",
"lword",
"byte",
]:
info["value"] = int(val_str_processed)
elif dtype_lower == "bool":
info["value"] = (
val_str_processed.lower() == "true" or val_str_processed == "1"
)
elif dtype_lower in ["real", "lreal"]:
info["value"] = float(val_str_processed)
except (ValueError, TypeError):
info["value"] = value_str
else:
info["type"] = "unknown_structure"
print(f"Advertencia: Access UID={uid} no es Symbol ni Constant.")
if info["type"] == "variable" and info.get("name") is None:
print(f"Error Interno: parse_access var sin nombre UID {uid}.")
info["type"] = "error_no_name"
return info
def parse_part(part_element):
"""Parsea un nodo <flg:Part> de LAD/FBD."""
if part_element is None:
return None
uid = part_element.get("UId")
name = part_element.get("Name")
if not uid or not name:
print(
f"Error: Part sin UID o Name: {etree.tostring(part_element, encoding='unicode')}"
)
return None
template_values = {}
negated_pins = {}
try:
for tv in part_element.xpath("./TemplateValue"):
tv_name = tv.get("Name")
tv_type = tv.get("Type")
if tv_name and tv_type:
template_values[tv_name] = tv_type
except Exception as e:
print(f"Advertencia: Error extrayendo TemplateValues Part UID={uid}: {e}")
try:
for negated_elem in part_element.xpath("./Negated"):
negated_pin_name = negated_elem.get("Name")
if negated_pin_name:
negated_pins[negated_pin_name] = True
except Exception as e:
print(f"Advertencia: Error extrayendo Negated Pins Part UID={uid}: {e}")
return {
"uid": uid,
"type": name,
"template_values": template_values,
"negated_pins": negated_pins,
}
def parse_call(call_element):
"""Parsea un nodo <flg:Call> de LAD/FBD."""
if call_element is None:
return None
uid = call_element.get("UId")
if not uid:
print(
f"Error: Call encontrado sin UID: {etree.tostring(call_element, encoding='unicode')}"
)
return None
call_info_elem = call_element.xpath("./flg:CallInfo", namespaces=ns)
if not call_info_elem:
call_info_elem_no_ns = call_element.xpath("./CallInfo")
if not call_info_elem_no_ns:
print(f"Error: Call UID {uid} sin elemento CallInfo.")
return {"uid": uid, "type": "Call_error", "error": "Missing CallInfo"}
else:
print(f"Advertencia: Call UID {uid} encontró CallInfo SIN namespace.")
call_info = call_info_elem_no_ns[0]
else:
call_info = call_info_elem[0]
block_name = call_info.get("Name")
block_type = call_info.get("BlockType")
if not block_name or not block_type:
print(f"Error: CallInfo para UID {uid} sin Name o BlockType.")
return {
"uid": uid,
"type": "Call_error",
"error": "Missing Name or BlockType in CallInfo",
}
instance_name, instance_scope = None, None
if block_type == "FB":
instance_elem_list = call_info.xpath("./flg:Instance", namespaces=ns)
if instance_elem_list:
instance_elem = instance_elem_list[0]
instance_scope = instance_elem.get("Scope")
component_elem_list = instance_elem.xpath("./flg:Component", namespaces=ns)
if component_elem_list:
component_elem = component_elem_list[0]
db_name_raw = component_elem.get("Name")
if db_name_raw:
instance_name = (
f'"{db_name_raw}"'
if not db_name_raw.startswith('"')
else db_name_raw
)
else:
print(
f"Advertencia: <flg:Component> en <flg:Instance> FB Call UID {uid} sin 'Name'."
)
else:
print(
f"Advertencia: No se encontró <flg:Component> en <flg:Instance> FB Call UID {uid}."
)
else:
print(
f"Advertencia: FB Call '{block_name}' UID {uid} sin <flg:Instance>. ¿Llamada a multi-instancia STAT?"
)
call_scope = call_element.get("Scope")
if call_scope == "LocalVariable":
instance_name = f'"{block_name}"'
instance_scope = "Static"
print(
f"INFO: Asumiendo instancia STAT '{instance_name}' para FB Call UID {uid}."
)
call_data = {
"uid": uid,
"type": "Call",
"block_name": block_name,
"block_type": block_type,
}
if instance_name:
call_data["instance_db"] = instance_name
if instance_scope:
call_data["instance_scope"] = instance_scope
return call_data
def parse_interface_members(member_elements):
"""Parsea recursivamente miembros de interfaz/estructura."""
members_data = []
if not member_elements:
return members_data
for member in member_elements:
member_name = member.get("Name")
member_dtype_raw = member.get("Datatype")
member_version = member.get("Version")
member_remanence = member.get("Remanence", "NonRetain")
member_accessibility = member.get("Accessibility", "Public")
if not member_name or not member_dtype_raw:
print("Advertencia: Miembro sin nombre o tipo de dato. Saltando.")
continue
member_dtype = (
f"{member_dtype_raw}:v{member_version}"
if member_version
else member_dtype_raw
)
member_info = {
"name": member_name,
"datatype": member_dtype,
"remanence": member_remanence,
"accessibility": member_accessibility,
"start_value": None,
"comment": None,
"children": [],
"array_elements": {},
}
comment_node = member.xpath("./iface:Comment", namespaces=ns)
if comment_node:
member_info["comment"] = get_multilingual_text(
comment_node[0]
) # Usa la función robusta
start_value_node = member.xpath("./iface:StartValue", namespaces=ns)
if start_value_node:
constant_name = start_value_node[0].get("ConstantName")
member_info["start_value"] = (
constant_name
if constant_name
else (
start_value_node[0].text
if start_value_node[0].text is not None
else None
)
) # Devolver None si está vacío
nested_sections = member.xpath(
"./iface:Sections/iface:Section[@Name='None']/iface:Member", namespaces=ns
)
if nested_sections:
member_info["children"] = parse_interface_members(nested_sections)
if isinstance(member_dtype, str) and member_dtype.lower().startswith("array["):
subelements = member.xpath("./iface:Subelement", namespaces=ns)
for sub in subelements:
path = sub.get("Path")
sub_start_value_node = sub.xpath("./iface:StartValue", namespaces=ns)
if path and sub_start_value_node:
constant_name = sub_start_value_node[0].get("ConstantName")
value = (
constant_name
if constant_name
else (
sub_start_value_node[0].text
if sub_start_value_node[0].text is not None
else None
)
) # Devolver None si está vacío
member_info["array_elements"][path] = value
sub_comment_node = sub.xpath("./iface:Comment", namespaces=ns)
if path and sub_comment_node:
sub_comment_text = get_multilingual_text(
sub_comment_node[0]
) # Usa la función robusta
if isinstance(member_info["array_elements"].get(path), dict):
member_info["array_elements"][path][
"comment"
] = sub_comment_text
else:
member_info["array_elements"][path] = {
"value": member_info["array_elements"].get(path),
"comment": sub_comment_text,
}
members_data.append(member_info)
return members_data
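For reference, a short sketch of what parse_access yields for a typed constant; the import path follows the file header above (ToUpload/parsers/parser_utils.py), while the XML fragment is illustrative.

# Sketch: feeding a single <flg:Access> constant through parse_access
from lxml import etree
from parsers.parser_utils import parse_access  # per the file header above

FLG = "http://www.siemens.com/automation/Openness/SW/NetworkSource/FlgNet/v4"
fragment = (
    f'<flg:Access xmlns:flg="{FLG}" UId="21" Scope="TypedConstant">'
    '<flg:Constant><flg:ConstantType>Time</flg:ConstantType>'
    '<flg:ConstantValue>T#2s</flg:ConstantValue></flg:Constant>'
    '</flg:Access>'
)
print(parse_access(etree.fromstring(fragment)))
# -> {'uid': '21', 'scope': 'TypedConstant', 'type': 'constant', 'datatype': 'Time', 'value': 'T#2s'}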

View File

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import sympy
import traceback
import re
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager

View File

@ -1,12 +1,15 @@
# ToUpload/x0_main.py
import argparse
import subprocess
import os
import sys
import locale
import glob # <--- Importar glob para buscar archivos
import glob
import time
import traceback # <--- Mover import aquí
# (Función get_console_encoding y variable CONSOLE_ENCODING como en la respuesta anterior)
# --- Funciones ---
def get_console_encoding():
"""Obtiene la codificación preferida de la consola, con fallback."""
try:
@ -16,16 +19,18 @@ def get_console_encoding():
CONSOLE_ENCODING = get_console_encoding()
# Descomenta la siguiente línea si quieres ver la codificación detectada:
# print(f"Detected console encoding: {CONSOLE_ENCODING}")
# (Función run_script como en la respuesta anterior, usando CONSOLE_ENCODING)
def run_script(script_name, xml_arg):
"""Runs a given script with the specified XML file argument."""
script_path = os.path.join(os.path.dirname(__file__), script_name)
command = [sys.executable, script_path, xml_arg]
print(f"\n--- Running {script_name} with argument: {xml_arg} ---")
def run_script(script_name, xml_arg, *extra_args):
"""Runs a given script with the specified XML file argument and optional extra args."""
script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), script_name)
python_executable = sys.executable
command = [python_executable, script_path, os.path.abspath(xml_arg)]
command.extend(extra_args)
print(
f"\n--- Running {script_name} with arguments: {[os.path.relpath(arg) if os.path.exists(arg) else arg for arg in command[2:]]} ---"
)
try:
result = subprocess.run(
command,
@ -34,112 +39,63 @@ def run_script(script_name, xml_arg):
text=True,
encoding=CONSOLE_ENCODING,
errors="replace",
) # 'replace' para evitar errores
# Imprimir stdout y stderr
# Eliminar saltos de línea extra al final si existen
stdout_clean = result.stdout.strip()
stderr_clean = result.stderr.strip()
)
stdout_clean = result.stdout.strip() if result.stdout else ""
stderr_clean = result.stderr.strip() if result.stderr else ""
if stdout_clean:
print(stdout_clean)
if stderr_clean:
print("--- Stderr ---")
print(stderr_clean)
print("--------------")
print(f"--- {script_name} finished successfully ---")
print(f"--- Stderr ({script_name}) ---", file=sys.stderr)
print(stderr_clean, file=sys.stderr)
print("--------------------------", file=sys.stderr)
return True
except FileNotFoundError:
print(f"Error: Script '{script_path}' not found.")
print(
f"Error: Script '{script_path}' or Python executable '{python_executable}' not found.",
file=sys.stderr,
)
return False
except subprocess.CalledProcessError as e:
print(f"Error running {script_name}:")
print(f"Return code: {e.returncode}")
stdout_decoded = (
e.stdout.decode(CONSOLE_ENCODING, errors="replace").strip()
if isinstance(e.stdout, bytes)
else (e.stdout or "").strip()
)
stderr_decoded = (
e.stderr.decode(CONSOLE_ENCODING, errors="replace").strip()
if isinstance(e.stderr, bytes)
else (e.stderr or "").strip()
print(
f"Error running {script_name}: Script returned non-zero exit code {e.returncode}.",
file=sys.stderr,
)
stdout_decoded = e.stdout.strip() if e.stdout else ""
stderr_decoded = e.stderr.strip() if e.stderr else ""
# Asegurar indentación correcta aquí
if stdout_decoded:
print("--- Stdout ---")
print(stdout_decoded)
print(f"--- Stdout ({script_name}) ---", file=sys.stderr)
print(stdout_decoded, file=sys.stderr)
if stderr_decoded:
print("--- Stderr ---")
print(stderr_decoded)
print("--------------")
print(f"--- Stderr ({script_name}) ---", file=sys.stderr)
print(stderr_decoded, file=sys.stderr)
print("--------------------------", file=sys.stderr)
# El return debe estar fuera de los if, pero dentro del except
return False
except Exception as e:
print(f"An unexpected error occurred while running {script_name}: {e}")
print(
f"An unexpected error occurred while running {script_name}: {e}",
file=sys.stderr,
)
# No necesitamos importar traceback aquí si ya está al inicio
traceback.print_exc(file=sys.stderr)
return False
# --- NUEVA FUNCIÓN PARA SELECCIONAR ARCHIVO ---
def select_xml_file():
"""Busca archivos .xml, los lista y pide al usuario que elija uno."""
print("No XML file specified. Searching for XML files in current directory...")
# Buscar archivos .xml en el directorio actual (.)
xml_files = sorted(glob.glob("*.xml")) # sorted para orden alfabético
if not xml_files:
print("Error: No .xml files found in the current directory.")
sys.exit(1)
print("\nAvailable XML files:")
for i, filename in enumerate(xml_files, start=1):
print(f" {i}: {filename}")
while True:
try:
choice = input(
f"Enter the number of the file to process (1-{len(xml_files)}): "
)
choice_num = int(choice)
if 1 <= choice_num <= len(xml_files):
selected_file = xml_files[choice_num - 1]
print(f"Selected: {selected_file}")
return selected_file
else:
print("Invalid choice. Please enter a number from the list.")
except ValueError:
print("Invalid input. Please enter a number.")
except EOFError: # Manejar si la entrada se cierra inesperadamente
print("\nSelection cancelled.")
sys.exit(1)
# --- FIN NUEVA FUNCIÓN ---
# --- Bloque Principal ---
if __name__ == "__main__":
# Imports necesarios para esta sección
import os
import sys
import glob # Asegúrate de que glob esté importado al principio del archivo
# Directorio base donde buscar los archivos XML (relativo al script)
# --- PARTE 1: BUSCAR ARCHIVOS ---
base_search_dir = "XML Project"
script_dir = os.path.dirname(__file__) # Directorio donde está x0_main.py
script_dir = os.path.dirname(os.path.abspath(__file__))
xml_project_dir = os.path.join(script_dir, base_search_dir)
print(f"Buscando archivos XML recursivamente en: '{xml_project_dir}'")
# Verificar si el directorio 'XML Project' existe
if not os.path.isdir(xml_project_dir):
print(
f"Error: El directorio '{xml_project_dir}' no existe o no es un directorio."
)
print(
"Por favor, crea el directorio 'XML Project' en la misma carpeta que este script y coloca tus archivos XML dentro."
)
print(f"Error: El directorio '{xml_project_dir}' no existe.", file=sys.stderr)
sys.exit(1)
# Buscar todos los archivos .xml recursivamente dentro de xml_project_dir
# Usamos os.path.join para construir la ruta de búsqueda correctamente
# y '**/*.xml' para la recursividad con glob
search_pattern = os.path.join(xml_project_dir, "**", "*.xml")
xml_files_found = glob.glob(search_pattern, recursive=True)
@ -147,111 +103,113 @@ if __name__ == "__main__":
print(
f"No se encontraron archivos XML en '{xml_project_dir}' o sus subdirectorios."
)
sys.exit(0) # Salir limpiamente si no hay archivos
sys.exit(0)
print(f"Se encontraron {len(xml_files_found)} archivos XML para procesar:")
# Ordenar para un procesamiento predecible (opcional)
xml_files_found.sort()
for xml_file in xml_files_found:
# Imprimir la ruta relativa desde el directorio del script para claridad
print(f" - {os.path.relpath(xml_file, script_dir)}")
# Scripts a ejecutar en secuencia (asegúrate que los nombres son correctos)
# --- PARTE 2: EJECUTAR x1 PARA TODOS LOS ARCHIVOS ---
print("\n--- Fase 1: Ejecutando x1_to_json.py para todos los archivos XML ---")
script1 = "x1_to_json.py"
failed_x1_files = []
processed_x1_count = 0
for xml_filepath in xml_files_found:
relative_path = os.path.relpath(xml_filepath, script_dir)
if run_script(script1, xml_filepath):
print(f"--- {script1} completado para: {relative_path} ---")
processed_x1_count += 1
else:
print(f"--- {script1} FALLÓ para: {relative_path} ---", file=sys.stderr)
failed_x1_files.append(relative_path)
# ... (Resumen Fase 1 y manejo de errores sin cambios) ...
print(f"\n--- Resumen Fase 1 (x1) ---")
print(f"Archivos procesados por x1: {processed_x1_count}")
print(f"Archivos fallidos en x1: {len(failed_x1_files)}")
if failed_x1_files:
print("Archivos fallidos:")
[print(f" - {f}") for f in failed_x1_files]
print("---------------------------")
if len(failed_x1_files) == len(xml_files_found):
print(
"\nError Crítico: Todos los archivos fallaron en la Fase 1 (x1). Abortando.",
file=sys.stderr,
)
sys.exit(1)
elif failed_x1_files:
print(
"\nAdvertencia: Algunos archivos fallaron en la Fase 1 (x1). Se continuará con los exitosos.",
file=sys.stderr,
)
successful_xml_files = [
f
for f in xml_files_found
if os.path.relpath(f, script_dir) not in failed_x1_files
]
else:
successful_xml_files = xml_files_found
# --- PARTE 3: EJECUTAR x2 y x3 PARA LOS ARCHIVOS EXITOSOS DE x1 ---
print("\n--- Fase 2: Ejecutando x2_process.py y x3_generate_scl.py ---")
script2 = "x2_process.py"
script3 = "x3_generate_scl.py"
processed_pipeline_count = 0
failed_pipeline_files = set()
# Procesar cada archivo encontrado
processed_count = 0
failed_count = 0
for xml_filepath in xml_files_found:
print(
f"\n--- Iniciando pipeline para: {os.path.relpath(xml_filepath, script_dir)} ---"
)
for xml_filepath in successful_xml_files:
relative_path = os.path.relpath(xml_filepath, script_dir)
print(f"\n--- Iniciando pipeline (x2, x3) para: {relative_path} ---")
# Usar la ruta absoluta para evitar problemas si los scripts cambian de directorio
absolute_xml_filepath = os.path.abspath(xml_filepath)
# Ejecutar x2
success_x2 = run_script(script2, xml_filepath)
if not success_x2:
print(
f"--- Pipeline falló en '{script2}' para: {relative_path} ---",
file=sys.stderr,
)
failed_pipeline_files.add(relative_path)
continue
# Ejecutar los scripts en secuencia para el archivo actual
# La función run_script ya está definida en tu script x0_main.py
success = True
if not run_script(script1, absolute_xml_filepath):
print(
f"\nPipeline falló en el script '{script1}' para el archivo: {os.path.relpath(xml_filepath, script_dir)}"
)
success = False
elif not run_script(script2, absolute_xml_filepath):
print(
f"\nPipeline falló en el script '{script2}' para el archivo: {os.path.relpath(xml_filepath, script_dir)}"
)
success = False
elif not run_script(script3, absolute_xml_filepath):
print(
f"\nPipeline falló en el script '{script3}' para el archivo: {os.path.relpath(xml_filepath, script_dir)}"
)
success = False
print(f"--- {script2} completado para: {relative_path} ---")
if success:
# Ejecutar x3, PASANDO la ruta raíz del proyecto
success_x3 = run_script(script3, xml_filepath, xml_project_dir)
if not success_x3:
print(
f"--- Pipeline completado exitosamente para: {os.path.relpath(xml_filepath, script_dir)} ---"
)
processed_count += 1
else:
failed_count += 1
print(
f"--- Pipeline falló para: {os.path.relpath(xml_filepath, script_dir)} ---"
f"--- Pipeline falló en '{script3}' para: {relative_path} ---",
file=sys.stderr,
)
failed_pipeline_files.add(relative_path)
continue
print("\n--- Resumen Final del Procesamiento ---")
print(f"--- {script3} completado para: {relative_path} ---")
print(
f"--- Pipeline (x2, x3) completado exitosamente para: {relative_path} ---"
)
processed_pipeline_count += 1
# --- PARTE 4: RESUMEN FINAL ---
total_processed_initially = len(successful_xml_files)
final_failed_count = len(failed_pipeline_files)
final_success_count = total_processed_initially - final_failed_count
print("\n--- Resumen Final del Procesamiento Completo ---")
print(f"Total de archivos XML encontrados: {len(xml_files_found)}")
print(
f"Archivos procesados exitosamente por el pipeline completo: {processed_count}"
f"Archivos procesados por Fase 1 (x1): {processed_x1_count} (Fallidos: {len(failed_x1_files)})"
)
print(f"Archivos que fallaron en algún punto del pipeline: {failed_count}")
print("---------------------------------------")
xml_filename = None
print(f"Archivos procesados por Fase 2 (x2 y x3): {final_success_count}")
print(f"Archivos que fallaron en Fase 2 (x2 o x3): {final_failed_count}")
if failed_pipeline_files:
print("Archivos fallidos en Fase 2:")
for f in sorted(list(failed_pipeline_files)):
print(f" - {f}")
print("------------------------------------------------")
# Comprobar si se pasó un argumento de línea de comandos
# sys.argv[0] es el nombre del script, sys.argv[1] sería el primer argumento
if len(sys.argv) > 1:
# Si hay argumentos, usar argparse para parsearlo (permite -h, etc.)
parser = argparse.ArgumentParser(
description="Run the Simatic XML processing pipeline."
)
parser.add_argument(
"xml_file",
# Ya no necesitamos nargs='?' ni default aquí porque sabemos que hay un argumento
help="Path to the XML file to process.",
)
# Parsear solo los argumentos conocidos, ignorar extras si los hubiera
args, unknown = parser.parse_known_args()
xml_filename = args.xml_file
print(f"XML file specified via argument: {xml_filename}")
else:
# Si no hay argumentos, llamar a la función interactiva
xml_filename = select_xml_file()
# --- El resto del script continúa igual, usando xml_filename ---
# Verificar si el archivo XML de entrada (seleccionado o pasado) existe
if not os.path.exists(xml_filename):
print(f"Error: Selected or specified XML file not found: {xml_filename}")
if failed_x1_files or final_failed_count > 0:
sys.exit(1)
print(f"\nStarting pipeline for: {xml_filename}")
# Run scripts sequentially (asegúrate que los nombres son correctos)
script1 = "x1_to_json.py"
script2 = "x2_process.py"
script3 = "x3_generate_scl.py"
if run_script(script1, xml_filename):
if run_script(script2, xml_filename):
if run_script(script3, xml_filename):
print("\nPipeline completed successfully.")
else:
print("\nPipeline failed at script:", script3)
else:
print("\nPipeline failed at script:", script2)
else:
print("\nPipeline failed at script:", script1)
sys.exit(0)

File diff suppressed because it is too large

View File

@ -18,17 +18,15 @@ from processors.processor_utils import (
from processors.symbol_manager import SymbolManager # Import the manager
# --- Constantes y Configuración ---
# SCL_SUFFIX = "_scl" # Old suffix
SCL_SUFFIX = "_sympy_processed" # New suffix to indicate processing method
SCL_SUFFIX = "_sympy_processed"
GROUPED_COMMENT = "// Logic included in grouped IF"
SIMPLIFIED_IF_COMMENT = "// Simplified IF condition by script" # May still be useful
SIMPLIFIED_IF_COMMENT = "// Simplified IF condition by script"
# Global data dictionary (consider passing 'data' as argument if needed elsewhere)
# It's currently used by process_group_ifs implicitly via the outer scope,
# which works but passing it explicitly might be cleaner.
# Global data dictionary
data = {}
# --- (process_group_ifs y load_processors SIN CAMBIOS) ---
def process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data):
"""
Busca condiciones (ya procesadas -> tienen expr SymPy en sympy_map)
@ -112,6 +110,15 @@ def process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data):
# SCoil/RCoil might also be groupable if their SCL is final assignment
"SCoil",
"RCoil",
"BLKMOV", # Added BLKMOV
"TON",
"TOF",
"TP",
"Se",
"Sd", # Added timers
"CTU",
"CTD",
"CTUD", # Added counters
]
for consumer_instr in network_logic:
@ -135,26 +142,26 @@ def process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data):
is_enabled_by_us = True
# Check if consumer is groupable AND has its final SCL generated
# The suffix check needs adjustment based on how terminating processors set it.
# Assuming processors like Move, Add, Call, SCoil, RCoil NOW generate final SCL and add a suffix.
if (
is_enabled_by_us
and consumer_type.endswith(SCL_SUFFIX) # Or a specific "final_scl" suffix
and consumer_type.endswith(SCL_SUFFIX) # Check if processed
and consumer_type_original in groupable_types
):
consumer_scl = consumer_instr.get("scl", "")
# Extract core SCL (logic is similar, maybe simpler if SCL is cleaner now)
# Extract core SCL
core_scl = None
if consumer_scl:
# If consumer SCL itself is an IF generated by EN, take the body
if consumer_scl.strip().startswith("IF"):
match = re.search(
r"THEN\s*(.*?)\s*END_IF;",
r"IF\s+.*?THEN\s*(.*?)\s*END_IF;", # More robust regex
consumer_scl,
re.DOTALL | re.IGNORECASE,
)
core_scl = match.group(1).strip() if match else None
# If body contains another IF, maybe don't group? (optional complexity)
# if core_scl and core_scl.strip().startswith("IF"): core_scl = None
elif not consumer_scl.strip().startswith(
"//"
): # Otherwise, take the whole line if not comment
@ -267,9 +274,9 @@ def load_processors(processors_dir="processors"):
"func": processor_func,
}
)
print(
f" - Cargado '{type_name}' (Prio: {priority}) desde {module_name_rel}.py"
)
#print(
# f" - Cargado '{type_name}' (Prio: {priority}) desde {module_name_rel}.py"
#)
else:
print(
f" Advertencia: 'processor_func' para '{type_name}' en {full_module_name} no es callable."
@ -292,26 +299,26 @@ def load_processors(processors_dir="processors"):
# Ordenar la lista por prioridad (menor primero)
processor_list_sorted = sorted(processor_list_unsorted, key=lambda x: x["priority"])
print(f"\nTotal de tipos de procesadores cargados: {len(processor_map)}")
print(
f"Orden de procesamiento por prioridad: {[item['type_name'] for item in processor_list_sorted]}"
)
#print(f"\nTotal de tipos de procesadores cargados: {len(processor_map)}")
#print(
# f"Orden de procesamiento por prioridad: {[item['type_name'] for item in processor_list_sorted]}"
#)
# Devolver el mapa (para lookup rápido si es necesario) y la lista ordenada
return processor_map, processor_list_sorted
# --- Bucle Principal de Procesamiento (Modificado para STL) ---
def process_json_to_scl(json_filepath):
# --- Bucle Principal de Procesamiento (MODIFICADO) ---
def process_json_to_scl(json_filepath, output_json_filepath):
"""
Lee JSON simplificado, aplica procesadores dinámicos (ignorando redes STL y bloques DB),
y guarda JSON procesado.
Lee JSON simplificado, aplica procesadores dinámicos (ignorando STL, UDT, TagTable, DB),
y guarda JSON procesado en la ruta especificada.
"""
global data
if not os.path.exists(json_filepath):
print(f"Error: JSON no encontrado: {json_filepath}")
return
return False
print(f"Cargando JSON desde: {json_filepath}")
try:
with open(json_filepath, "r", encoding="utf-8") as f:
@ -319,44 +326,45 @@ def process_json_to_scl(json_filepath):
except Exception as e:
print(f"Error al cargar JSON: {e}")
traceback.print_exc()
return
return False
# --- Obtener lenguaje del bloque principal ---
block_language = data.get("language", "Unknown")
block_type = data.get("block_type", "Unknown") # FC, FB, GlobalDB
print(f"Procesando bloque tipo: {block_type}, Lenguaje principal: {block_language}")
# --- MODIFICADO: Obtener tipo de bloque (FC, FB, GlobalDB, OB, PlcUDT, PlcTagTable) ---
block_type = data.get("block_type", "Unknown")
print(f"Procesando bloque tipo: {block_type}")
# --- SI ES UN DB, SALTAR EL PROCESAMIENTO LÓGICO ---
if block_language == "DB":
print(
"INFO: El bloque es un Data Block (DB). Saltando procesamiento lógico de x2."
)
# Simplemente guardamos una copia (o el mismo archivo si no se requiere sufijo)
output_filename = json_filepath.replace(
"_simplified.json", "_simplified_processed.json"
)
print(f"Guardando JSON de DB (sin cambios lógicos) en: {output_filename}")
# --- MODIFICADO: SALTAR PROCESAMIENTO PARA DB, UDT, TAG TABLE ---
if block_type in ["GlobalDB", "PlcUDT", "PlcTagTable"]: # <-- Comprobar tipos a saltar
print(f"INFO: El bloque es {block_type}. Saltando procesamiento lógico de x2.")
print(f"Guardando JSON de {block_type} (sin cambios lógicos) en: {output_json_filepath}")
try:
with open(output_filename, "w", encoding="utf-8") as f:
json.dump(data, f, indent=4, ensure_ascii=False)
print("Guardado de DB completado.")
# CORRECCIÓN IMPORTANTE: x2 debe leer el .json y escribir el _processed.json
# Así que debe cargar el archivo de entrada y guardarlo en el de salida
if not os.path.exists(json_filepath):
print(f"Error Crítico (x2 skip): JSON de entrada no encontrado '{json_filepath}' para {block_type}", file=sys.stderr)
return False
with open(json_filepath, "r", encoding="utf-8") as f_in:
data_to_save = json.load(f_in) # Cargar el json original
with open(output_json_filepath, "w", encoding="utf-8") as f_out:
json.dump(data_to_save, f_out, indent=4, ensure_ascii=False) # Guardar en _processed.json
print(f"Guardado de {block_type} completado.")
return True
except Exception as e:
print(f"Error Crítico al guardar JSON del DB: {e}")
print(f"Error Crítico al guardar JSON de {block_type}: {e}")
traceback.print_exc()
return # <<< SALIR TEMPRANO PARA DBs
return False
# --- SI NO ES DB, CONTINUAR CON EL PROCESAMIENTO LÓGICO (FC/FB) ---
print("INFO: El bloque es FC/FB. Iniciando procesamiento lógico...")
# --- SI NO ES DB/UDT/TAG TABLE (FC, FB, OB), CONTINUAR CON EL PROCESAMIENTO LÓGICO ---
print(f"INFO: El bloque es {block_type}. Iniciando procesamiento lógico...")
# (Carga de procesadores y mapas de acceso SIN CAMBIOS)
script_dir = os.path.dirname(__file__)
processors_dir_path = os.path.join(script_dir, "processors")
processor_map, sorted_processors = load_processors(processors_dir_path)
if not processor_map:
print("Error crítico: No se cargaron procesadores. Abortando.")
return
return False
network_access_maps = {}
# Crear mapas de acceso por red (copiado/adaptado de versión anterior)
for network in data.get("networks", []):
net_id = network["id"]
current_access_map = {}
@ -385,13 +393,14 @@ def process_json_to_scl(json_filepath):
current_access_map[dest["uid"]] = dest
network_access_maps[net_id] = current_access_map
# (Inicialización de SymbolManager y bucle iterativo SIN CAMBIOS)
symbol_manager = SymbolManager()
sympy_map = {}
max_passes = 30
passes = 0
processing_complete = False
print("\n--- Iniciando Bucle de Procesamiento Iterativo (FC/FB) ---")
print(f"\n--- Iniciando Bucle de Procesamiento Iterativo ({block_type}) ---")
while passes < max_passes and not processing_complete:
passes += 1
made_change_in_base_pass = False
@ -400,9 +409,9 @@ def process_json_to_scl(json_filepath):
num_sympy_processed_this_pass = 0
num_grouped_this_pass = 0
# --- FASE 1: Procesadores Base (Ignorando STL) ---
# FASE 1: Procesadores Base (Ignorando STL)
print(f" Fase 1 (SymPy Base - Orden por Prioridad):")
num_sympy_processed_this_pass = 0 # Resetear contador para el pase
num_sympy_processed_this_pass = 0
for processor_info in sorted_processors:
current_type_name = processor_info["type_name"]
func_to_call = processor_info["func"]
@ -410,28 +419,36 @@ def process_json_to_scl(json_filepath):
network_id = network["id"]
network_lang = network.get("language", "LAD")
if network_lang == "STL":
continue # Saltar STL
continue
access_map = network_access_maps.get(network_id, {})
network_logic = network.get("logic", [])
for instruction in network_logic:
instr_uid = instruction.get("instruction_uid")
instr_type_original = instruction.get("type", "Unknown")
instr_type_current = instruction.get("type", "Unknown")
if (
instr_type_original.endswith(SCL_SUFFIX)
or "_error" in instr_type_original
instr_type_current.endswith(SCL_SUFFIX)
or "_error" in instr_type_current
or instruction.get("grouped", False)
or instr_type_original
in ["RAW_STL_CHUNK", "RAW_SCL_CHUNK", "UNSUPPORTED_LANG"]
or instr_type_current
in [
"RAW_STL_CHUNK",
"RAW_SCL_CHUNK",
"UNSUPPORTED_LANG",
"UNSUPPORTED_CONTENT",
"PARSING_ERROR",
]
):
continue
lookup_key = instr_type_original.lower()
lookup_key = instr_type_current.lower()
effective_type_name = lookup_key
if instr_type_original == "Call":
block_type = instruction.get("block_type", "").upper()
if block_type == "FC":
if instr_type_current == "Call":
call_block_type = instruction.get("block_type", "").upper()
if call_block_type == "FC":
effective_type_name = "call_fc"
elif block_type == "FB":
elif call_block_type == "FB":
effective_type_name = "call_fb"
if effective_type_name == current_type_name:
@ -444,31 +461,51 @@ def process_json_to_scl(json_filepath):
num_sympy_processed_this_pass += 1
except Exception as e:
print(
f"ERROR(SymPy Base) al procesar {instr_type_original} UID {instr_uid}: {e}"
f"ERROR(SymPy Base) al procesar {instr_type_current} UID {instr_uid}: {e}"
)
traceback.print_exc()
instruction["scl"] = (
f"// ERROR en SymPy procesador base: {e}"
)
instruction["type"] = instr_type_original + "_error"
instruction["type"] = instr_type_current + "_error"
made_change_in_base_pass = True
print(
f" -> {num_sympy_processed_this_pass} instrucciones (no STL) procesadas con SymPy."
)
# --- FASE 2: Agrupación IF (Ignorando STL) ---
if (
made_change_in_base_pass or passes == 1
): # Ejecutar siempre en el primer pase
# FASE 2: Agrupación IF (Ignorando STL)
if made_change_in_base_pass or passes == 1:
print(f" Fase 2 (Agrupación IF con Simplificación):")
num_grouped_this_pass = 0 # Resetear contador para el pase
num_grouped_this_pass = 0
for network in data.get("networks", []):
network_id = network["id"]
network_lang = network.get("language", "LAD")
if network_lang == "STL":
continue # Saltar STL
continue
network_logic = network.get("logic", [])
for instruction in network_logic:
uids_in_network = sorted(
[
instr.get("instruction_uid", "Z")
for instr in network_logic
if instr.get("instruction_uid")
]
)
for uid_to_process in uids_in_network:
instruction = next(
(
instr
for instr in network_logic
if instr.get("instruction_uid") == uid_to_process
),
None,
)
if not instruction:
continue
if instruction.get("grouped") or "_error" in instruction.get(
"type", ""
):
continue
if instruction.get("type", "").endswith(SCL_SUFFIX):
try:
group_changed = process_group_ifs(
instruction, network_id, sympy_map, symbol_manager, data
@ -485,7 +522,7 @@ def process_json_to_scl(json_filepath):
f" -> {num_grouped_this_pass} agrupaciones realizadas (en redes no STL)."
)
# --- Comprobar si se completó el procesamiento ---
# Comprobar si se completó
if not made_change_in_base_pass and not made_change_in_group_pass:
print(
f"\n--- No se hicieron más cambios en el pase {passes}. Proceso iterativo completado. ---"
@ -495,28 +532,28 @@ def process_json_to_scl(json_filepath):
print(
f"--- Fin Pase {passes}: {num_sympy_processed_this_pass} proc SymPy, {num_grouped_this_pass} agrup. Continuando..."
)
# --- Comprobar límite de pases ---
if passes == max_passes and not processing_complete:
print(f"\n--- ADVERTENCIA: Límite de {max_passes} pases alcanzado...")
# --- FIN BUCLE ITERATIVO ---
# --- Verificación Final (Ajustada para RAW_STL_CHUNK) ---
print("\n--- Verificación Final de Instrucciones No Procesadas (FC/FB) ---")
# (Verificación Final y Guardado JSON SIN CAMBIOS)
print(f"\n--- Verificación Final de Instrucciones No Procesadas ({block_type}) ---")
unprocessed_count = 0
unprocessed_details = []
ignored_types = [
"raw_scl_chunk",
"unsupported_lang",
"raw_stl_chunk",
] # Añadido raw_stl_chunk
"unsupported_content",
"parsing_error",
]
for network in data.get("networks", []):
network_id = network.get("id", "Unknown ID")
network_title = network.get("title", f"Network {network_id}")
network_lang = network.get("language", "LAD")
if network_lang == "STL":
continue # No verificar redes STL
continue
for instruction in network.get("logic", []):
instr_uid = instruction.get("instruction_uid", "Unknown UID")
instr_type = instruction.get("type", "Unknown Type")
@ -529,8 +566,7 @@ def process_json_to_scl(json_filepath):
):
unprocessed_count += 1
unprocessed_details.append(
f" - Red '{network_title}' (ID: {network_id}, Lang: {network_lang}), "
f"Instrucción UID: {instr_uid}, Tipo: '{instr_type}'"
f" - Red '{network_title}' (ID: {network_id}, Lang: {network_lang}), Instrucción UID: {instr_uid}, Tipo: '{instr_type}'"
)
if unprocessed_count > 0:
print(
@ -543,81 +579,65 @@ def process_json_to_scl(json_filepath):
"INFO: Todas las instrucciones relevantes (no STL) parecen haber sido procesadas o agrupadas."
)
# --- Guardar JSON Final ---
output_filename = json_filepath.replace(
"_simplified.json", "_simplified_processed.json"
)
print(f"\nGuardando JSON procesado (FC/FB) en: {output_filename}")
print(f"\nGuardando JSON procesado ({block_type}) en: {output_json_filepath}")
try:
with open(output_filename, "w", encoding="utf-8") as f:
with open(output_json_filepath, "w", encoding="utf-8") as f:
json.dump(data, f, indent=4, ensure_ascii=False)
print("Guardado completado.")
return True
except Exception as e:
print(f"Error Crítico al guardar JSON procesado: {e}")
traceback.print_exc()
return False
# --- Ejecución (sin cambios) ---
# --- Ejecución (MODIFICADO) ---
if __name__ == "__main__":
# Imports necesarios solo para la ejecución como script principal
import argparse
import os
import sys
# Configurar ArgumentParser para recibir la ruta del XML original obligatoria
parser = argparse.ArgumentParser(
description="Process simplified JSON (_simplified.json) to embed SCL logic (SymPy version). Expects original XML filepath as argument."
description="Process simplified JSON to embed SCL logic. Expects original XML filepath as argument."
)
parser.add_argument(
"source_xml_filepath", # Argumento posicional obligatorio
help="Path to the original source XML file (passed from x0_main.py, used to derive JSON input name).",
"source_xml_filepath",
help="Path to the original source XML file (passed from x0_main.py).",
)
args = parser.parse_args() # Parsea los argumentos de sys.argv
args = parser.parse_args()
source_xml_file = args.source_xml_filepath
source_xml_file = args.source_xml_filepath # Obtiene la ruta del XML original
# Verificar si el archivo XML original existe (como referencia, útil para depuración)
# No es estrictamente necesario para la lógica aquí, pero ayuda a confirmar
if not os.path.exists(source_xml_file):
print(
f"Advertencia (x2): Archivo XML original no encontrado: '{source_xml_file}', pero se intentará encontrar el JSON correspondiente."
)
# No salir necesariamente, pero es bueno saberlo.
# Derivar nombre del archivo JSON de entrada (_simplified.json)
xml_filename_base = os.path.splitext(os.path.basename(source_xml_file))[0]
# Asumir que el JSON simplificado está en el mismo directorio que el XML original
input_dir = os.path.dirname(source_xml_file) # Directorio del XML original
input_json_file = os.path.join(input_dir, f"{xml_filename_base}_simplified.json")
base_dir = os.path.dirname(source_xml_file)
parsing_dir = os.path.join(base_dir, "parsing")
input_json_file = os.path.join(parsing_dir, f"{xml_filename_base}.json")
output_json_file = os.path.join(parsing_dir, f"{xml_filename_base}_processed.json")
# Determinar el nombre esperado del archivo JSON procesado de salida
output_json_file = os.path.join(
input_dir, f"{xml_filename_base}_simplified_processed.json"
)
os.makedirs(parsing_dir, exist_ok=True)
print(
f"(x2) Procesando: '{os.path.relpath(input_json_file)}' -> '{os.path.relpath(output_json_file)}'"
)
# Verificar si el archivo JSON de entrada (_simplified.json) EXISTE antes de procesar
if not os.path.exists(input_json_file):
print(
f"Error Fatal (x2): El archivo de entrada JSON simplificado no existe: '{input_json_file}'"
f"Error Fatal (x2): El archivo de entrada JSON no existe: '{input_json_file}'"
)
print(
f"Asegúrate de que 'x1_to_json.py' se ejecutó correctamente para '{os.path.relpath(source_xml_file)}'."
)
sys.exit(1) # Salir si el archivo necesario no está
sys.exit(1)
else:
# Llamar a la función principal de procesamiento del script
# Asumiendo que tu función principal se llama process_json_to_scl(input_json_path)
try:
process_json_to_scl(input_json_file)
success = process_json_to_scl(input_json_file, output_json_file)
if success:
sys.exit(0)
else:
sys.exit(1)
except Exception as e:
print(
f"Error Crítico (x2) durante el procesamiento de '{input_json_file}': {e}"
)
import traceback
traceback.print_exc()
sys.exit(1) # Salir con error si la función principal falla
sys.exit(1)
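As a reference for the directory layout x2 now assumes, a small sketch of the path derivation performed in the __main__ block above (the sample XML path is an assumption):

import os

source_xml_file = os.path.join("XML Project", "PLC1", "MyBlock.xml")  # sample path, assumption
base = os.path.splitext(os.path.basename(source_xml_file))[0]         # "MyBlock"
parsing_dir = os.path.join(os.path.dirname(source_xml_file), "parsing")
input_json = os.path.join(parsing_dir, f"{base}.json")                # written by x1_to_json.py
output_json = os.path.join(parsing_dir, f"{base}_processed.json")     # consumed later by x3_generate_scl.py
print(input_json, "->", output_json)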

View File

@ -1,552 +1,129 @@
# x3_generate_scl.py
# ToUpload/x3_generate_scl.py
# -*- coding: utf-8 -*-
import json
import os
import re
import argparse
import sys
import traceback # Importar traceback para errores
import traceback
# --- Importar Utilidades y Constantes (Asumiendo ubicación) ---
# --- Importar Generadores Específicos ---
try:
# Intenta importar desde el paquete de procesadores si está estructurado así
from processors.processor_utils import format_variable_name
# Definir SCL_SUFFIX aquí o importarlo si está centralizado
SCL_SUFFIX = "_sympy_processed" # Asegúrate que coincida con x2_process.py
GROUPED_COMMENT = (
"// Logic included in grouped IF" # Opcional, si se usa para filtrar
)
except ImportError:
print(
"Advertencia: No se pudo importar 'format_variable_name' desde processors.processor_utils."
)
print(
"Usando una implementación local básica (¡PUEDE FALLAR CON NOMBRES COMPLEJOS!)."
)
# Implementación local BÁSICA como fallback (MENOS RECOMENDADA)
def format_variable_name(name):
if not name:
return "_INVALID_NAME_"
if name.startswith('"') and name.endswith('"'):
return name # Mantener comillas
prefix = "#" if name.startswith("#") else ""
if prefix:
name = name[1:]
if name and name[0].isdigit():
name = "_" + name
name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
return prefix + name
SCL_SUFFIX = "_sympy_processed"
GROUPED_COMMENT = "// Logic included in grouped IF"
# para formatear valores iniciales
def format_scl_start_value(value, datatype):
"""Formatea un valor para la inicialización SCL según el tipo."""
if value is None:
return None
datatype_lower = datatype.lower() if datatype else ""
value_str = str(value)
if "bool" in datatype_lower:
return "TRUE" if value_str.lower() == "true" else "FALSE"
elif "string" in datatype_lower:
escaped_value = value_str.replace("'", "''")
if escaped_value.startswith("'") and escaped_value.endswith("'"):
escaped_value = escaped_value[1:-1]
return f"'{escaped_value}'"
elif "char" in datatype_lower: # Añadido Char
escaped_value = value_str.replace("'", "''")
if escaped_value.startswith("'") and escaped_value.endswith("'"):
escaped_value = escaped_value[1:-1]
return f"'{escaped_value}'"
elif any(
t in datatype_lower
for t in [
"int",
"byte",
"word",
"dint",
"dword",
"lint",
"lword",
"sint",
"usint",
"uint",
"udint",
"ulint",
]
): # Ampliado
try:
return str(int(value_str))
except ValueError:
if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", value_str):
return value_str
return f"'{value_str}'" # O como string si no es entero ni símbolo
elif "real" in datatype_lower or "lreal" in datatype_lower:
try:
f_val = float(value_str)
s_val = str(f_val)
if "." not in s_val and "e" not in s_val.lower():
s_val += ".0"
return s_val
except ValueError:
if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", value_str):
return value_str
return f"'{value_str}'"
elif "time" in datatype_lower: # Añadido Time, S5Time, LTime
# Quitar T#, LT#, S5T# si existen
prefix = ""
if value_str.upper().startswith("T#"):
prefix = "T#"
value_str = value_str[2:]
elif value_str.upper().startswith("LT#"):
prefix = "LT#"
value_str = value_str[3:]
elif value_str.upper().startswith("S5T#"):
prefix = "S5T#"
value_str = value_str[4:]
# Devolver con el prefijo correcto o T# por defecto si no había
if prefix:
return f"{prefix}{value_str}"
elif "s5time" in datatype_lower:
return f"S5T#{value_str}"
elif "ltime" in datatype_lower:
return f"LT#{value_str}"
else:
return f"T#{value_str}" # Default a TIME
elif "date" in datatype_lower: # Añadido Date, DT, TOD
if value_str.upper().startswith("D#"):
return value_str
elif "dt" in datatype_lower or "date_and_time" in datatype_lower:
if value_str.upper().startswith("DT#"):
return value_str
else:
return f"DT#{value_str}" # Añadir prefijo DT#
elif "tod" in datatype_lower or "time_of_day" in datatype_lower:
if value_str.upper().startswith("TOD#"):
return value_str
else:
return f"TOD#{value_str}" # Añadir prefijo TOD#
else:
return f"D#{value_str}" # Default a Date
# Fallback genérico
else:
if re.match(
r'^[a-zA-Z_][a-zA-Z0-9_."#\[\]]+$', value_str
): # Permitir más caracteres en símbolos/tipos
# Si es un UDT o Struct complejo, podría venir con comillas, quitarlas
if value_str.startswith('"') and value_str.endswith('"'):
return value_str[1:-1]
return value_str
else:
escaped_value = value_str.replace("'", "''")
return f"'{escaped_value}'"
# --- NUEVA FUNCIÓN RECURSIVA para generar declaraciones SCL (VAR/STRUCT/ARRAY) ---
def generate_scl_declarations(variables, indent_level=1):
"""Genera las líneas SCL para declarar variables, structs y arrays."""
scl_lines = []
indent = " " * indent_level
for var in variables:
var_name_scl = format_variable_name(var.get("name"))
var_dtype_raw = var.get("datatype", "VARIANT")
# Limpiar comillas de tipos de datos UDT ("MyType" -> MyType)
var_dtype = (
var_dtype_raw.strip('"')
if var_dtype_raw.startswith('"') and var_dtype_raw.endswith('"')
else var_dtype_raw
)
var_comment = var.get("comment")
start_value = var.get("start_value")
children = var.get("children") # Para structs
array_elements = var.get("array_elements") # Para arrays
# Manejar tipos de datos Array especiales
array_match = re.match(r"(Array\[.*\]\s+of\s+)(.*)", var_dtype, re.IGNORECASE)
base_type_for_init = var_dtype
declaration_dtype = var_dtype
if array_match:
array_prefix = array_match.group(1)
base_type_raw = array_match.group(2).strip()
# Limpiar comillas del tipo base del array
base_type_for_init = (
base_type_raw.strip('"')
if base_type_raw.startswith('"') and base_type_raw.endswith('"')
else base_type_raw
)
declaration_dtype = (
f'{array_prefix}"{base_type_for_init}"'
if '"' not in base_type_raw
else f"{array_prefix}{base_type_raw}"
) # Reconstruir con comillas si es UDT
# Reconstruir declaración con comillas si es UDT y no array
elif (
not array_match and var_dtype != base_type_for_init
): # Es un tipo que necesita comillas (UDT)
declaration_dtype = f'"{var_dtype}"'
declaration_line = f"{indent}{var_name_scl} : {declaration_dtype}"
init_value = None
# ---- Arrays ----
if array_elements:
# Ordenar índices (asumiendo que son numéricos)
try:
sorted_indices = sorted(array_elements.keys(), key=int)
except ValueError:
sorted_indices = sorted(
array_elements.keys()
) # Fallback a orden alfabético
init_values = [
format_scl_start_value(array_elements[idx], base_type_for_init)
for idx in sorted_indices
]
valid_inits = [v for v in init_values if v is not None]
if valid_inits:
init_value = f"[{', '.join(valid_inits)}]"
# ---- Structs ----
elif children:
# No añadir comentario // Struct aquí, es redundante
scl_lines.append(declaration_line) # Añadir línea de declaración base
scl_lines.append(f"{indent}STRUCT")
scl_lines.extend(generate_scl_declarations(children, indent_level + 1))
scl_lines.append(f"{indent}END_STRUCT;")
if var_comment:
scl_lines.append(f"{indent}// {var_comment}")
scl_lines.append("") # Línea extra
continue # Saltar resto para Struct
# ---- Tipos Simples ----
else:
if start_value is not None:
init_value = format_scl_start_value(start_value, var_dtype)
# Añadir inicialización si existe
if init_value:
declaration_line += f" := {init_value}"
declaration_line += ";"
if var_comment:
declaration_line += f" // {var_comment}"
scl_lines.append(declaration_line)
return scl_lines
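# Boceto ilustrativo con nombres hipotéticos de la estructura de interfaz que consume
# generate_scl_declarations y del SCL aproximado que emitiría (el formato exacto de los
# valores iniciales depende de format_scl_start_value):
_ejemplo_static = [
    {"name": "Contador", "datatype": "Int", "start_value": "5", "comment": "Valor inicial"},
    {"name": "Receta", "datatype": '"UDT_Receta"', "start_value": None, "comment": None},
]
# generate_scl_declarations(_ejemplo_static, indent_level=1) devolvería algo como:
#    Contador : Int := 5; // Valor inicial
#    Receta : "UDT_Receta";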
# --- Importación de los Generadores Específicos (subpaquete 'generators') ---
try:
    from generators.generate_scl_db import generate_scl_for_db
    from generators.generate_scl_code_block import generate_scl_for_code_block
    from generators.generate_md_udt import generate_udt_markdown
    from generators.generate_md_tag_table import generate_tag_table_markdown
    from generators.generator_utils import format_variable_name, generate_scl_declarations
except ImportError as e:
    print(f"Error crítico: No se pudieron importar los módulos de 'generators': {e}")
    sys.exit(1)
# --- Modificar generate_scl_or_markdown para pasar project_root_dir ---
def generate_scl_or_markdown(processed_json_filepath, output_directory, project_root_dir): # <-- Nuevo argumento
"""
Genera un archivo SCL o Markdown a partir del JSON procesado,
llamando a la función generadora apropiada y escribiendo el archivo.
Ahora pasa project_root_dir a los generadores relevantes.
"""
    if not os.path.exists(processed_json_filepath):
        print(f"Error: Archivo JSON procesado no encontrado en '{processed_json_filepath}'")
        return
print(f"Cargando JSON procesado desde: {processed_json_filepath}")
try:
with open(processed_json_filepath, "r", encoding="utf-8") as f:
data = json.load(f)
    except Exception as e:
        print(f"Error al cargar o parsear JSON: {e}")
        traceback.print_exc()
        return
block_name = data.get("block_name", "UnknownBlock")
block_type = data.get("block_type", "Unknown")
scl_block_name = format_variable_name(block_name)
output_content = []
output_extension = ".scl"
print(f"Generando salida para: {block_type} '{scl_block_name}' (Original: {block_name})")
# --- Selección del Generador y Extensión ---
generation_function = None
# Pasar project_root_dir a las funciones que lo necesiten
func_args = {'data': data}
if block_type == "GlobalDB":
print(" -> Modo de generación: DATA_BLOCK SCL")
generation_function = generate_scl_for_db
func_args['project_root_dir'] = project_root_dir # <--- Pasar la ruta raíz
output_extension = ".scl"
elif block_type in ["FC", "FB", "OB"]:
print(f" -> Modo de generación: {block_type} SCL")
generation_function = generate_scl_for_code_block
# generate_scl_for_code_block también usa generate_scl_declarations internamente
# así que también necesita la ruta raíz.
func_args['project_root_dir'] = project_root_dir # <--- Pasar la ruta raíz
output_extension = ".scl"
elif block_type == "PlcUDT":
print(" -> Modo de generación: UDT Markdown")
generation_function = generate_udt_markdown
# generate_udt_markdown no necesita buscar otros archivos por ahora
output_extension = ".md"
elif block_type == "PlcTagTable":
print(" -> Modo de generación: Tag Table Markdown")
generation_function = generate_tag_table_markdown
output_extension = ".md"
else:
print(f"Error: Tipo de bloque desconocido '{block_type}'. No se generará archivo.")
return
# --- Llamar a la función generadora ---
if generation_function:
try:
# Pasar argumentos desempaquetados
output_content = generation_function(**func_args)
except Exception as gen_e:
print(f"Error durante la generación de contenido para {block_type} '{scl_block_name}': {gen_e}")
traceback.print_exc()
return
# --- Extracción de Información del Bloque (Común) ---
block_name = data.get("block_name", "UnknownBlock")
block_number = data.get("block_number")
block_lang_original = data.get("language", "Unknown") # Será "DB" para Data Blocks
block_type = data.get("block_type", "Unknown") # FC, FB, GlobalDB
block_comment = data.get("block_comment", "")
scl_block_name = format_variable_name(block_name) # Nombre SCL seguro
print(
f"Generando SCL para: {block_type} '{scl_block_name}' (Original: {block_name}, Lang: {block_lang_original})"
)
scl_output = []
# --- Escritura del Archivo de Salida ---
# Usar el directorio del XML original para la salida final
output_filename_base = f"{scl_block_name}{output_extension}"
# El output_directory ya viene calculado correctamente desde __main__
output_filepath = os.path.join(output_directory, output_filename_base)
# --- GENERACIÓN PARA DATA BLOCK (DB) ---
if block_lang_original == "DB":
print("Modo de generación: DATA_BLOCK")
scl_output.append(f"// Block Type: {block_type}")
scl_output.append(f"// Block Name (Original): {block_name}")
if block_number:
scl_output.append(f"// Block Number: {block_number}")
if block_comment:
scl_output.append(f"// Block Comment: {block_comment}")
scl_output.append("")
scl_output.append(f'DATA_BLOCK "{scl_block_name}"')
scl_output.append("{ S7_Optimized_Access := 'TRUE' }") # Asumir optimizado
scl_output.append("VERSION : 0.1")
scl_output.append("")
interface_data = data.get("interface", {})
static_vars = interface_data.get("Static", [])
if static_vars:
scl_output.append("VAR")
scl_output.extend(generate_scl_declarations(static_vars, indent_level=1))
scl_output.append("END_VAR")
scl_output.append("")
else:
print(
"Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB."
)
scl_output.append("VAR")
scl_output.append("END_VAR")
scl_output.append("")
scl_output.append("BEGIN")
scl_output.append("")
scl_output.append("END_DATA_BLOCK")
# --- GENERACIÓN PARA FUNCTION BLOCK / FUNCTION (FC/FB) ---
else:
print("Modo de generación: FUNCTION_BLOCK / FUNCTION")
scl_block_keyword = "FUNCTION_BLOCK" if block_type == "FB" else "FUNCTION"
# Cabecera del Bloque
scl_output.append(f"// Block Type: {block_type}")
scl_output.append(f"// Block Name (Original): {block_name}")
if block_number:
scl_output.append(f"// Block Number: {block_number}")
scl_output.append(f"// Original Language: {block_lang_original}")
if block_comment:
scl_output.append(f"// Block Comment: {block_comment}")
scl_output.append("")
# Manejar tipo de retorno para FUNCTION
return_type = "Void" # Default
interface_data = data.get("interface", {})
if scl_block_keyword == "FUNCTION" and interface_data.get("Return"):
return_member = interface_data["Return"][
0
] # Asumir un solo valor de retorno
return_type_raw = return_member.get("datatype", "Void")
return_type = (
return_type_raw.strip('"')
if return_type_raw.startswith('"') and return_type_raw.endswith('"')
else return_type_raw
)
# Añadir comillas si es UDT
if return_type != return_type_raw:
return_type = f'"{return_type}"'
scl_output.append(
f'{scl_block_keyword} "{scl_block_name}" : {return_type}'
if scl_block_keyword == "FUNCTION"
else f'{scl_block_keyword} "{scl_block_name}"'
)
scl_output.append("{ S7_Optimized_Access := 'TRUE' }")
scl_output.append("VERSION : 0.1")
scl_output.append("")
# Declaraciones de Interfaz FC/FB
section_order = [
"Input",
"Output",
"InOut",
"Static",
"Temp",
"Constant",
] # Return ya está en cabecera
declared_temps = set()
for section_name in section_order:
vars_in_section = interface_data.get(section_name, [])
if vars_in_section:
scl_section_keyword = f"VAR_{section_name.upper()}"
if section_name == "Static":
scl_section_keyword = "VAR_STAT"
if section_name == "Temp":
scl_section_keyword = "VAR_TEMP"
if section_name == "Constant":
scl_section_keyword = "CONSTANT"
scl_output.append(scl_section_keyword)
scl_output.extend(
generate_scl_declarations(vars_in_section, indent_level=1)
)
if section_name == "Temp":
declared_temps.update(
format_variable_name(v.get("name"))
for v in vars_in_section
if v.get("name")
)
scl_output.append("END_VAR")
scl_output.append("")
# Declaraciones VAR_TEMP adicionales detectadas
temp_vars = set()
temp_pattern = re.compile(
r'"?#(_temp_[a-zA-Z0-9_]+)"?|"?(_temp_[a-zA-Z0-9_]+)"?'
)
for network in data.get("networks", []):
for instruction in network.get("logic", []):
scl_code = instruction.get("scl", "")
edge_update_code = instruction.get("_edge_mem_update_scl", "")
code_to_scan = (
(scl_code if scl_code else "")
+ "\n"
+ (edge_update_code if edge_update_code else "")
)
if code_to_scan:
found_temps = temp_pattern.findall(code_to_scan)
for temp_tuple in found_temps:
temp_name = next((t for t in temp_tuple if t), None)
if temp_name:
temp_vars.add(
"#" + temp_name
if not temp_name.startswith("#")
else temp_name
)
additional_temps = sorted(list(temp_vars - declared_temps))
if additional_temps:
if not interface_data.get("Temp"):
scl_output.append("VAR_TEMP")
for var_name in additional_temps:
scl_name = format_variable_name(var_name)
inferred_type = "Bool" # Asumir Bool
scl_output.append(
f" {scl_name} : {inferred_type}; // Auto-generated temporary"
)
if not interface_data.get("Temp"):
scl_output.append("END_VAR")
scl_output.append("")
# Cuerpo del Bloque FC/FB
scl_output.append("BEGIN")
scl_output.append("")
# Iterar por redes y lógica (como antes, incluyendo manejo STL Markdown)
for i, network in enumerate(data.get("networks", [])):
network_title = network.get("title", f'Network {network.get("id")}')
network_comment = network.get("comment", "")
network_lang = network.get("language", "LAD")
scl_output.append(
f" // Network {i+1}: {network_title} (Original Language: {network_lang})"
)
if network_comment:
for line in network_comment.splitlines():
scl_output.append(f" // {line}")
scl_output.append("")
network_has_code = False
if network_lang == "STL":
network_has_code = True
if (
network.get("logic")
and network["logic"][0].get("type") == "RAW_STL_CHUNK"
):
raw_stl_code = network["logic"][0].get(
"stl", "// ERROR: STL code missing"
)
scl_output.append(f" {'//'} ```STL")
for stl_line in raw_stl_code.splitlines():
scl_output.append(f" {stl_line}")
scl_output.append(f" {'//'} ```")
else:
scl_output.append(" // ERROR: Contenido STL inesperado.")
else: # LAD, FBD, SCL, etc.
for instruction in network.get("logic", []):
instruction_type = instruction.get("type", "")
scl_code = instruction.get("scl", "")
is_grouped = instruction.get("grouped", False)
if is_grouped:
continue
if (
instruction_type.endswith(SCL_SUFFIX)
or instruction_type in ["RAW_SCL_CHUNK", "UNSUPPORTED_LANG"]
) and scl_code:
is_only_comment = all(
line.strip().startswith("//")
for line in scl_code.splitlines()
if line.strip()
)
is_if_block = scl_code.strip().startswith("IF")
if not is_only_comment or is_if_block:
network_has_code = True
for line in scl_code.splitlines():
scl_output.append(f" {line}")
if network_has_code:
scl_output.append("")
else:
scl_output.append(f" // Network did not produce printable SCL code.")
scl_output.append("")
# Fin del bloque FC/FB
scl_output.append(f"END_{scl_block_keyword}")
    # --- Escritura del Archivo de Salida (Común) ---
    print(f" -> Escribiendo archivo de salida en: {output_filepath}")
    try:
        os.makedirs(os.path.dirname(output_filepath), exist_ok=True)  # Asegurar que el directorio exista
        with open(output_filepath, "w", encoding="utf-8") as f:
            for line in output_content:
                f.write(line + "\n")
        print(f"Generación de {output_extension.upper()} completada.")
    except Exception as e:
        print(f"Error al escribir el archivo {output_extension.upper()}: {e}")
        traceback.print_exc()
# --- Ejecución ---
if __name__ == "__main__":
# Imports necesarios solo para la ejecución como script principal
import argparse
import os
import sys
import traceback # Asegurarse que traceback está importado si se usa en generate_scl
parser = argparse.ArgumentParser(description="Generate final SCL or Markdown file.")
parser.add_argument("source_xml_filepath", help="Path to the original source XML file.")
# Nuevo argumento para la ruta raíz del proyecto
parser.add_argument("project_root_dir", help="Path to the root directory of the XML project structure.") # <--- Nuevo argumento
args = parser.parse_args()
source_xml_file = args.source_xml_filepath
project_root_dir = args.project_root_dir # <--- Capturar la ruta raíz
    # Verificar si el archivo XML original existe (solo como referencia para derivar rutas)
    if not os.path.exists(source_xml_file):
        print(f"Advertencia (x3): Archivo XML original no encontrado: '{source_xml_file}', pero se intentará encontrar el JSON procesado.")
    # Derivar nombres de archivos de entrada (JSON procesado) y directorio de salida
    xml_filename_base = os.path.splitext(os.path.basename(source_xml_file))[0]
    xml_dir = os.path.dirname(source_xml_file)  # Directorio del XML original
    parsing_dir = os.path.join(xml_dir, "parsing")  # Directorio de parsing relativo al XML
    input_json_file = os.path.join(parsing_dir, f"{xml_filename_base}_processed.json")
    # El directorio de salida final será el mismo que el del XML original
    output_dir = xml_dir
    print(f"(x3) Generando SCL/MD desde: '{os.path.relpath(input_json_file)}' en directorio: '{os.path.relpath(output_dir)}'")
    print(f"(x3) Usando ruta raíz del proyecto: '{project_root_dir}' para buscar UDTs.")
    # Verificar si el archivo JSON procesado de entrada existe
    if not os.path.exists(input_json_file):
        print(f"Error Fatal (x3): JSON procesado no encontrado: '{input_json_file}'")
        print(f"Asegúrate de que 'x2_process.py' se ejecutó correctamente para '{os.path.relpath(source_xml_file)}'.")
        sys.exit(1)
    else:
        try:
            # Pasar project_root_dir a la función principal
            generate_scl_or_markdown(input_json_file, output_dir, project_root_dir)
            sys.exit(0)
        except Exception as e:
            print(f"Error Crítico (x3) durante la generación desde '{input_json_file}': {e}")
            traceback.print_exc()
            sys.exit(1)
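# Ejemplo ilustrativo de invocación (nombre de script y rutas hipotéticos):
#   python x3_generate_scl.py "XML Project/PLC/FC_Ejemplo.xml" "XML Project"
# Con esos argumentos, el JSON procesado se buscaría en
#   XML Project/PLC/parsing/FC_Ejemplo_processed.json
# y el .scl o .md resultante se escribiría junto al XML original (XML Project/PLC/).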

View File

@ -1,144 +0,0 @@
# -*- coding: utf-8 -*-
import os
import sys
import re
import argparse
# Directorio donde se crearán los archivos de procesador
PROCESSORS_DIR = "processors"
# Cabecera estándar para añadir a cada nuevo archivo
FILE_HEADER = """# -*- coding: utf-8 -*-
# TODO: Import necessary functions from processor_utils
# Example: from .processor_utils import get_scl_representation, format_variable_name
# Or: import processors.processor_utils as utils
# TODO: Define constants if needed (e.g., SCL_SUFFIX) or import them
SCL_SUFFIX = "_scl"
# --- Function code starts ---
"""
# Pie de página estándar con la función get_processor_info de plantilla
def get_file_footer(func_name):
"""Generates the standard footer with a placeholder get_processor_info."""
type_name_guess = func_name.replace('process_', '')
return f"""
# --- Function code ends ---
# --- Processor Information Function ---
def get_processor_info():
\"\"\"Returns the type name and processing function for this module.\"\"\"
# TODO: Adjust the type_name if needed (e.g., call, edge_detector, comparison, math).
# TODO: Return a list if this module handles multiple types (e.g., PBox/NBox, FC/FB).
type_name = "{type_name_guess}" # Basic guess
return {{'type_name': type_name, 'processor_func': {func_name}}}
"""
def extract_and_create_processors(source_py_file):
"""
Extracts top-level functions starting with 'process_' from the source file
and creates individual processor files in the PROCESSORS_DIR, copying
the entire function body until the next top-level definition.
"""
if not os.path.exists(source_py_file):
print(f"Error: Source file not found: '{source_py_file}'")
return
print(f"Reading source file: '{source_py_file}'")
try:
with open(source_py_file, 'r', encoding='utf-8') as f:
lines = f.readlines()
except Exception as e:
print(f"Error reading source file: {e}")
return
os.makedirs(PROCESSORS_DIR, exist_ok=True)
print(f"Ensuring '{PROCESSORS_DIR}' directory exists.")
print("Searching for processor functions (def process_...):")
processor_functions = [] # Store tuples of (name, start_line_index, end_line_index)
current_func_start = -1
current_func_name = None
# Pattern to find ANY top-level function definition
any_func_def_pattern = re.compile(r"^def\s+(\w+)\s*\(")
# Pattern specific to processor functions
process_func_def_pattern = re.compile(r"^def\s+(process_\w+)\s*\(")
# First pass: Identify start and end lines of all top-level functions
for i, line in enumerate(lines):
match = any_func_def_pattern.match(line)
if match:
# Found a new top-level function definition
if current_func_name is not None:
# Mark the end of the *previous* function
# Only add if it was a 'process_' function
if current_func_name.startswith("process_"):
processor_functions.append((current_func_name, current_func_start, i))
# Start tracking the new function
current_func_name = match.group(1)
current_func_start = i
# Add the last function found in the file (if it was a process_ function)
if current_func_name is not None and current_func_name.startswith("process_"):
processor_functions.append((current_func_name, current_func_start, len(lines)))
# Second pass: Create files using the identified line ranges
processor_count = 0
if not processor_functions:
print("\nWarning: No functions starting with 'process_' found at the top level.")
return
print(f"Found {len(processor_functions)} potential processor functions.")
for func_name, start_idx, end_idx in processor_functions:
print(f" - Processing: {func_name} (lines {start_idx+1}-{end_idx})")
func_lines = lines[start_idx:end_idx] # Extract lines for this function
# Remove trailing blank lines from the extracted block, often happens before next def
while func_lines and func_lines[-1].strip() == "":
func_lines.pop()
create_processor_file(func_name, func_lines)
processor_count += 1
print(f"\nFinished processing. Attempted to create/check {processor_count} processor files in '{PROCESSORS_DIR}'.")
def create_processor_file(func_name, func_lines):
"""Creates the individual processor file if it doesn't exist."""
target_filename = f"{func_name}.py"
target_filepath = os.path.join(PROCESSORS_DIR, target_filename)
if os.path.exists(target_filepath):
print(f" * Skipping: '{target_filename}' already exists.")
return
print(f" * Creating: '{target_filename}'...")
try:
with open(target_filepath, 'w', encoding='utf-8') as f:
f.write(FILE_HEADER)
# Write the function lines, ensuring consistent newline endings
for line in func_lines:
f.write(line.rstrip() + '\n')
f.write(get_file_footer(func_name))
except Exception as e:
print(f" Error writing file '{target_filename}': {e}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Extracts 'process_*' functions from a source Python file "
"and creates individual processor files."
)
parser.add_argument(
"source_file",
default="x2_process.py", # Valor por defecto
nargs='?', # Hacerlo opcional para que use el default
help="Path to the source Python file (default: x2_process.py)"
)
args = parser.parse_args()
extract_and_create_processors(args.source_file)
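# Ejemplo ilustrativo (archivo fuente hipotético): si x2_process.py contuviera
#   def process_contact(...):   # a partir de la línea 120
#   def _helper(...):           # línea 180
#   def process_coil(...):      # línea 200
# la primera pasada registraría process_contact con el rango [120, 180) y process_coil
# hasta el final del archivo; _helper se omitiría por no empezar con 'process_'.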

BIN
log.txt

Binary file not shown.

View File

@ -1,87 +0,0 @@
# processors/process_add.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re # Importar re si se usa para formateo
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed" # Usar el nuevo sufijo
def process_add(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera SCL para Add, simplificando la condición EN."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Add")
current_type = instruction.get("type","")
if current_type.endswith(SCL_SUFFIX) or "_error" in current_type:
return False
# Obtener EN (SymPy), IN1, IN2 (SymPy o Constante/String)
en_input = instruction["inputs"].get("en")
in1_info = instruction["inputs"].get("in1")
in2_info = instruction["inputs"].get("in2")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
op1_sympy_or_const = get_sympy_representation(in1_info, network_id, sympy_map, symbol_manager)
op2_sympy_or_const = get_sympy_representation(in2_info, network_id, sympy_map, symbol_manager)
# Obtener destino SCL
target_scl_name = get_target_scl_name(instruction, "out", network_id, default_to_temp=True)
# Verificar dependencias
if sympy_en_expr is None or op1_sympy_or_const is None or op2_sympy_or_const is None or target_scl_name is None:
# print(f"DEBUG Add {instr_uid}: Dependency not ready")
return False
# Convertir operandos SymPy/Constante a SCL strings
op1_scl = sympy_expr_to_scl(op1_sympy_or_const, symbol_manager)
op2_scl = sympy_expr_to_scl(op2_sympy_or_const, symbol_manager)
# Añadir paréntesis si contienen operadores (más seguro para SCL)
op1_scl_formatted = f"({op1_scl})" if re.search(r'[+\-*/ ]', op1_scl) else op1_scl
op2_scl_formatted = f"({op2_scl})" if re.search(r'[+\-*/ ]', op2_scl) else op2_scl
# Generar SCL Core
scl_core = f"{target_scl_name} := {op1_scl_formatted} + {op2_scl_formatted};"
# Aplicar Condición EN (Simplificando EN)
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for {instr_type_original} {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
# Evitar IF TRUE THEN...
if en_condition_scl == "TRUE":
scl_final = scl_core
# Evitar IF FALSE THEN...
elif en_condition_scl == "FALSE":
scl_final = f"// {instr_type_original} {instr_uid} condition simplified to FALSE."
else:
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# Actualizar instrucción y mapa
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar valor de salida (nombre SCL del destino) y ENO (expresión SymPy)
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = target_scl_name # Guardar nombre del destino (string)
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr # Guardar la expresión SymPy para ENO
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el procesador Add."""
# Asegurar que la clave coincida con el tipo en JSON ('add')
return {'type_name': 'add', 'processor_func': process_add, 'priority': 4}
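# Ejemplo ilustrativo con nombres hipotéticos: para un Add con EN alimentado por un
# contacto, in1 = una variable, in2 = 10 y destino "Total", el SCL generado tendría
# aproximadamente esta forma (la condición exacta depende de sympy_expr_to_scl):
#   IF <condición_EN> THEN
#       <Total> := <in1> + 10;
#   END_IF;
# Con EN sin conectar (sympy.true) se emite solo la asignación, sin IF.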

View File

@ -1,118 +0,0 @@
# processors/process_blkmov.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed" # Usar el nuevo sufijo
def process_blkmov(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL usando BLKMOV directamente como nombre de función,
simplificando la condición EN.
ADVERTENCIA: Sintaxis BLKMOV probablemente no compile en TIA estándar.
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "BlkMov") # Asegurar que el tipo base sea correcto
current_type = instruction.get("type","")
if current_type.endswith(SCL_SUFFIX) or "_error" in current_type:
return False
# --- Obtener Entradas ---
en_input = instruction["inputs"].get("en")
# Obtener EN como expresión SymPy
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
srcblk_info = instruction["inputs"].get("SRCBLK")
# Obtener nombre RAW de SRCBLK (como se hacía antes, si es necesario para BLKMOV)
# Este nombre NO pasa por SymPy, se usa directo en el string SCL final
raw_srcblk_name = srcblk_info.get("name") if srcblk_info else None
# Verificar dependencias (EN debe estar resuelto, SRCBLK debe tener nombre)
if sympy_en_expr is None:
# print(f"DEBUG BlkMov {instr_uid}: EN dependency not ready")
return False
if raw_srcblk_name is None:
print(f"Error: BLKMOV {instr_uid} sin información válida para SRCBLK.")
instruction["scl"] = f"// ERROR: BLKMOV {instr_uid} sin SRCBLK válido."
instruction["type"] = instr_type_original + "_error"
return True
# --- Obtener Destinos (Salidas) ---
# RET_VAL (Obtener nombre SCL formateado)
retval_target_scl = get_target_scl_name(instruction, "RET_VAL", network_id, default_to_temp=True)
if retval_target_scl is None: # get_target_scl_name ya imprime error si falla y default_to_temp=True
instruction["scl"] = f"// ERROR: BLKMOV {instr_uid} no pudo generar destino RET_VAL"
instruction["type"] = instr_type_original + "_error"
return True
# DSTBLK (Obtener nombre RAW como antes, si se necesita)
raw_dstblk_name = None
dstblk_output_list = instruction.get("outputs", {}).get("DSTBLK", [])
if dstblk_output_list and isinstance(dstblk_output_list, list) and len(dstblk_output_list) == 1:
dest_access = dstblk_output_list[0]
if dest_access.get("type") == "variable":
raw_dstblk_name = dest_access.get("name")
# Manejar error si no se encuentra DSTBLK
if raw_dstblk_name is None:
print(f"Error: No se encontró un destino único y válido para DSTBLK en BLKMOV {instr_uid}.")
instruction["scl"] = f"// ERROR: BLKMOV {instr_uid} sin destino DSTBLK válido."
instruction["type"] = instr_type_original + "_error"
return True
# --- Formateo especial (mantener nombres raw si es necesario para BLKMOV) ---
# Estos nombres van directo al string SCL, no necesitan pasar por SymPy
srcblk_final_str = raw_srcblk_name # Asumiendo que ya viene con comillas si las necesita
dstblk_final_str = raw_dstblk_name # Asumiendo que ya viene con comillas si las necesita
# --- Generar SCL Core (Usando la sintaxis no estándar BLKMOV) ---
scl_core = (
f"{retval_target_scl} := BLKMOV(SRCBLK := {srcblk_final_str}, "
f"DSTBLK => {dstblk_final_str}); " # Usar => para Out/InOut
f"// ADVERTENCIA: BLKMOV usado directamente, probablemente no compile!"
)
# --- Aplicar Condición EN (Simplificando EN) ---
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for {instr_type_original} {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
# Evitar IF TRUE/FALSE THEN...
if en_condition_scl == "TRUE":
scl_final = scl_core
elif en_condition_scl == "FALSE":
scl_final = f"// {instr_type_original} {instr_uid} condition simplified to FALSE."
else:
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# --- Actualizar Instrucción y Mapa SymPy ---
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar ENO (expresión SymPy)
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr
# Propagar el valor de retorno (nombre SCL string del destino de RET_VAL)
map_key_ret_val = (network_id, instr_uid, "RET_VAL")
sympy_map[map_key_ret_val] = retval_target_scl
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el procesador BLKMOV."""
# Asegurarse que el type_name coincida con el JSON ('blkmov' parece probable)
return {'type_name': 'blkmov', 'processor_func': process_blkmov, 'priority': 6}

View File

@ -1,131 +0,0 @@
# processors/process_call.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Asumiendo que estas funciones ahora existen y están adaptadas
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name, get_target_scl_name
from .symbol_manager import SymbolManager # Necesitamos pasar el symbol_manager
# Definir sufijo globalmente o importar
SCL_SUFFIX = "_sympy_processed"
def process_call(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "") # Tipo antes de añadir sufijo
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
block_name = instruction.get("block_name", f"UnknownCall_{instr_uid}")
block_type = instruction.get("block_type") # FC, FB
instance_db = instruction.get("instance_db") # Nombre del DB de instancia (para FB)
# Formatear nombres SCL (para la llamada final)
block_name_scl = format_variable_name(block_name)
instance_db_scl = format_variable_name(instance_db) if instance_db else None
# --- Manejo de EN ---
en_input = instruction["inputs"].get("en")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
if sympy_en_expr is None:
# print(f"DEBUG Call {instr_uid}: EN dependency not ready.")
return False # Dependencia EN no resuelta
# --- Procesar Parámetros de Entrada ---
scl_call_params = []
processed_inputs = {"en"}
dependencies_resolved = True
# Ordenar para consistencia
input_pin_names = sorted(instruction.get("inputs", {}).keys())
for pin_name in input_pin_names:
if pin_name not in processed_inputs:
source_info = instruction["inputs"][pin_name]
# Obtener la representación de la fuente (puede ser SymPy o Constante/String)
source_sympy_or_const = get_sympy_representation(source_info, network_id, sympy_map, symbol_manager)
if source_sympy_or_const is None:
# print(f"DEBUG Call {instr_uid}: Input param '{pin_name}' dependency not ready.")
dependencies_resolved = False
break # Salir si una dependencia no está lista
# Convertir la expresión/constante a SCL para la llamada
# Simplificar ANTES de convertir? Probablemente no necesario para parámetros de entrada
# a menos que queramos optimizar el valor pasado. Por ahora, convertir directo.
param_scl_value = sympy_expr_to_scl(source_sympy_or_const, symbol_manager)
# El nombre del pin SÍ necesita formateo
pin_name_scl = format_variable_name(pin_name)
scl_call_params.append(f"{pin_name_scl} := {param_scl_value}")
processed_inputs.add(pin_name)
if not dependencies_resolved:
return False
# --- Construcción de la Llamada SCL (similar a antes) ---
scl_call_body = ""
param_string = ", ".join(scl_call_params)
if block_type == "FB":
if not instance_db_scl:
print(f"Error: Call FB '{block_name_scl}' (UID {instr_uid}) sin instancia.")
instruction["scl"] = f"// ERROR: FB Call {block_name_scl} sin instancia"
instruction["type"] = f"Call_FB_error"
return True
scl_call_body = f"{instance_db_scl}({param_string});"
elif block_type == "FC":
scl_call_body = f"{block_name_scl}({param_string});"
else:
print(f"Advertencia: Tipo de bloque no soportado para Call UID {instr_uid}: {block_type}")
scl_call_body = f"// ERROR: Call a bloque tipo '{block_type}' no soportado: {block_name_scl}"
instruction["type"] = f"Call_{block_type}_error" # Marcar como error
# --- Aplicar Condición EN (usando la expresión SymPy EN) ---
scl_final = ""
if sympy_en_expr != sympy.true:
# Simplificar la condición EN ANTES de convertirla a SCL
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for Call {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
indented_call = "\n".join([f" {line}" for line in scl_call_body.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_call}\nEND_IF;"
else:
scl_final = scl_call_body
# --- Actualizar Instrucción y Mapa SymPy ---
instruction["scl"] = scl_final # Guardar el SCL final generado
instruction["type"] = (f"Call_{block_type}{SCL_SUFFIX}" if "_error" not in instruction["type"] else instruction["type"])
# Actualizar sympy_map con el estado ENO (es la expresión SymPy de EN)
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr # Guardar la expresión SymPy para ENO
# Propagar valores de salida (requiere info de interfaz o heurística)
# Si se sabe que hay una salida 'MyOutput', se podría añadir su SCL al mapa
# Ejemplo MUY simplificado:
# for pin_name, dest_list in instruction.get("outputs", {}).items():
# if pin_name != 'eno' and dest_list: # Asumir que hay un destino
# map_key_out = (network_id, instr_uid, pin_name)
# if block_type == "FB" and instance_db_scl:
# sympy_map[map_key_out] = f"{instance_db_scl}.{format_variable_name(pin_name)}" # Guardar el *string* de acceso SCL
# # Para FCs es más complejo, necesitaría asignación explícita a temp
# # else: # FC output -> necesita temp var
# # temp_var = generate_temp_var_name(...)
# # sympy_map[map_key_out] = temp_var
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para las llamadas a FC y FB."""
return [
{'type_name': 'call_fc', 'processor_func': process_call, 'priority': 6},
{'type_name': 'call_fb', 'processor_func': process_call, 'priority': 6}
]
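# Ejemplo ilustrativo con nombres hipotéticos (el formateo exacto de nombres depende de
# format_variable_name): una llamada a un FB "Motor_Ctrl" con DB de instancia
# "Motor_Ctrl_DB" y un parámetro Start generaría aproximadamente:
#   IF <condición_EN> THEN
#       "Motor_Ctrl_DB"(Start := <valor>);
#   END_IF;
# Para un FC se llama por el nombre del bloque: "Calc_Media"(in1 := ..., in2 := ...);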

View File

@ -1,82 +0,0 @@
# processors/process_coil.py
import sympy
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"
def process_coil(instruction, network_id, sympy_map, symbol_manager, data):
"""Genera la asignación SCL para Coil, simplificando la entrada SymPy."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Coil")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Get input expression from SymPy map
coil_input_info = instruction["inputs"].get("in")
sympy_expr_in = get_sympy_representation(coil_input_info, network_id, sympy_map, symbol_manager)
# Get target variable SCL name
target_scl_name = get_target_scl_name(instruction, "operand", network_id, default_to_temp=False) # Coil must have explicit target
# Check dependencies
if sympy_expr_in is None:
# print(f"DEBUG Coil {instr_uid}: Input dependency not ready.")
return False
if target_scl_name is None:
print(f"Error: Coil {instr_uid} operando no es variable o falta info.")
instruction["scl"] = f"// ERROR: Coil {instr_uid} operando no es variable."
instruction["type"] = instr_type_original + "_error"
return True # Processed with error
# *** Perform Simplification ***
try:
#simplified_expr = sympy.simplify_logic(sympy_expr_in, force=False)
#simplified_expr = sympy_expr_in
simplified_expr = sympy.logic.boolalg.to_dnf(sympy_expr_in, simplify=True)
except Exception as e:
print(f"Error during SymPy simplification for Coil {instr_uid}: {e}")
simplified_expr = sympy_expr_in # Fallback to original expression
# *** Convert simplified expression back to SCL string ***
condition_scl = sympy_expr_to_scl(simplified_expr, symbol_manager)
# Generate the final SCL assignment
scl_assignment = f"{target_scl_name} := {condition_scl};"
scl_final = scl_assignment
# --- Handle Edge Detector Memory Update (Logic similar to before) ---
# Check if input comes from PBox/NBox and append memory update
mem_update_scl_combined = None
if isinstance(coil_input_info, dict) and coil_input_info.get("type") == "connection":
source_uid = coil_input_info.get("source_instruction_uid")
source_pin = coil_input_info.get("source_pin")
source_instruction = None
network_logic = next((net["logic"] for net in data["networks"] if net["id"] == network_id), [])
for instr in network_logic:
if instr.get("instruction_uid") == source_uid:
source_instruction = instr
break
if source_instruction:
# Check for the original type before suffix was added
orig_source_type = source_instruction.get("type", "").replace(SCL_SUFFIX, '').replace('_error', '')
if orig_source_type in ["PBox", "NBox"] and '_edge_mem_update_scl' in source_instruction:
mem_update_scl_combined = source_instruction.get('_edge_mem_update_scl')
if mem_update_scl_combined:
scl_final = f"{scl_assignment}\n{mem_update_scl_combined}"
# Clear the source SCL?
source_instruction['scl'] = f"// Edge Logic handled by Coil {instr_uid}"
# Update instruction
instruction["scl"] = scl_final
instruction["type"] = instr_type_original + SCL_SUFFIX
# Coil typically doesn't output to scl_map
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el procesador Coil."""
return {'type_name': 'coil', 'processor_func': process_coil, 'priority': 3}

View File

@ -1,87 +0,0 @@
# processors/process_comparison.py
# -*- coding: utf-8 -*-
import sympy
import traceback
from .processor_utils import get_sympy_representation, format_variable_name # No necesita sympy_expr_to_scl aquí
from .symbol_manager import SymbolManager # Necesita acceso al manager
SCL_SUFFIX = "_sympy_processed"
def process_comparison(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera la expresión SymPy para Comparadores (GT, LT, GE, LE, NE).
El resultado se propaga por sympy_map['out'].
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "") # GT, LT, GE, LE, NE
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Mapa de tipos a funciones/clases SymPy Relational
# Nota: Asegúrate de que los tipos coincidan (ej. si son números o booleanos)
op_map = {
"GT": sympy.Gt, # Greater Than >
"LT": sympy.Lt, # Less Than <
"GE": sympy.Ge, # Greater or Equal >=
"LE": sympy.Le, # Less or Equal <=
"NE": sympy.Ne # Not Equal <> (sympy.Ne maneja esto)
}
sympy_relation_func = op_map.get(instr_type_original.upper())
if not sympy_relation_func:
instruction["scl"] = f"// ERROR: Tipo de comparación no soportado para SymPy: {instr_type_original}"
instruction["type"] = instr_type_original + "_error"
return True
# Obtener operandos como expresiones SymPy o constantes/strings
in1_info = instruction["inputs"].get("in1")
in2_info = instruction["inputs"].get("in2")
op1_sympy = get_sympy_representation(in1_info, network_id, sympy_map, symbol_manager)
op2_sympy = get_sympy_representation(in2_info, network_id, sympy_map, symbol_manager)
# Obtener 'pre' (RLO anterior) como expresión SymPy
pre_input = instruction["inputs"].get("pre") # Asumiendo que 'pre' es la entrada RLO
sympy_pre_rlo = get_sympy_representation(pre_input, network_id, sympy_map, symbol_manager) if pre_input else sympy.true
# Verificar dependencias
if op1_sympy is None or op2_sympy is None or sympy_pre_rlo is None:
# print(f"DEBUG Comparison {instr_uid}: Dependency not ready")
return False
# Crear la expresión de comparación SymPy
try:
# Convertir constantes string a número si es posible (Sympy puede necesitarlo)
# Esto es heurístico y puede fallar. Mejor si los tipos son conocidos.
op1_eval = sympy.sympify(op1_sympy) if isinstance(op1_sympy, str) else op1_sympy
op2_eval = sympy.sympify(op2_sympy) if isinstance(op2_sympy, str) else op2_sympy
comparison_expr = sympy_relation_func(op1_eval, op2_eval)
except (SyntaxError, TypeError, ValueError) as e:
print(f"Error creating SymPy comparison for {instr_uid}: {e}")
instruction["scl"] = f"// ERROR creando expr SymPy Comparison {instr_uid}: {e}"
instruction["type"] = instr_type_original + "_error"
return True
# Guardar resultado en el mapa para 'out' (es una expresión booleana SymPy)
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = comparison_expr
# Guardar el RLO de entrada ('pre') como ENO en el mapa SymPy
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_pre_rlo
# Marcar como procesado, SCL principal es solo comentario
instruction["scl"] = f"// SymPy Comparison {instr_type_original}: {comparison_expr}" # Comentario opcional
instruction["type"] = instr_type_original + SCL_SUFFIX
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para los comparadores (excepto EQ, que debe ser similar)."""
return [
{'type_name': 'gt', 'processor_func': process_comparison, 'priority': 2},
{'type_name': 'lt', 'processor_func': process_comparison, 'priority': 2},
{'type_name': 'ge', 'processor_func': process_comparison, 'priority': 2},
{'type_name': 'le', 'processor_func': process_comparison, 'priority': 2},
{'type_name': 'ne', 'processor_func': process_comparison, 'priority': 2}
# Asegúrate de tener también un procesador para 'eq' usando sympy.Eq
]
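# Ejemplo ilustrativo con nombres hipotéticos: un GT con in1 = Temperatura e in2 = 80 no
# emite SCL propio; deja en sympy_map[(red, uid, 'out')] la expresión Gt(Temperatura, 80),
# que el consumidor (p. ej. una bobina) convertirá más tarde a algo como Temperatura > 80.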

View File

@ -1,60 +0,0 @@
# processors/process_contact.py
import sympy
from .processor_utils import get_sympy_representation, format_variable_name # Use new util
from .symbol_manager import SymbolManager, extract_plc_variable_name # Need symbol manager access
# Define SCL_SUFFIX or import if needed globally
SCL_SUFFIX = "_sympy_processed" # Indicate processing type
def process_contact(instruction, network_id, sympy_map, symbol_manager, data): # Pass symbol_manager
"""Genera la expresión SymPy para Contact (normal o negado)."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Contact")
# Check if already processed with the new method
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
is_negated = instruction.get("negated_pins", {}).get("operand", False)
# Get incoming SymPy expression (RLO)
in_input = instruction["inputs"].get("in")
sympy_expr_in = get_sympy_representation(in_input, network_id, sympy_map, symbol_manager)
# Get operand SymPy Symbol
operand_info = instruction["inputs"].get("operand")
operand_plc_name = extract_plc_variable_name(operand_info)
sympy_symbol_operand = symbol_manager.get_symbol(operand_plc_name) if operand_plc_name else None
# Check dependencies
if sympy_expr_in is None or sympy_symbol_operand is None:
# print(f"DEBUG Contact {instr_uid}: Dependency not ready (In: {sympy_expr_in is not None}, Op: {sympy_symbol_operand is not None})")
return False # Dependencies not ready
# Apply negation using SymPy
current_term = sympy.Not(sympy_symbol_operand) if is_negated else sympy_symbol_operand
# Combine with previous RLO using SymPy
# Simplify common cases: TRUE AND X -> X
if sympy_expr_in == sympy.true:
sympy_expr_out = current_term
else:
# Could add FALSE AND X -> FALSE optimization here too
sympy_expr_out = sympy.And(sympy_expr_in, current_term)
# Store the resulting SymPy expression object in the map
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = sympy_expr_out
# Mark instruction as processed (SCL field is now less relevant here)
instruction["scl"] = f"// SymPy Contact: {sympy_expr_out}" # Optional debug comment
instruction["type"] = instr_type_original + SCL_SUFFIX # Use the new suffix
# Contact doesn't usually have ENO, it modifies the RLO ('out')
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el procesador Contact."""
# Ensure 'data' argument is added if needed by the processor function signature change
return {'type_name': 'contact', 'processor_func': process_contact, 'priority': 1}
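# Ejemplo ilustrativo: dos contactos en serie sobre A y B (el segundo negado) dejan en
# sympy_map['out'] una expresión equivalente a And(A, Not(B)); la bobina que consume ese
# RLO (process_coil, más arriba) la simplifica con to_dnf y la convierte a SCL, quedando
# algo como:  Salida := A AND NOT B;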

View File

@ -1,90 +0,0 @@
# processors/process_convert.py
# -*- coding: utf-8 -*-
import sympy
import traceback
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_convert(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera SCL para Convert, tratando la conversión como una asignación."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Convert")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener EN y IN
en_input = instruction["inputs"].get("en")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
in_info = instruction["inputs"].get("in")
sympy_or_const_in = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
# Obtener destino SCL
target_scl_name = get_target_scl_name(instruction, "out", network_id, default_to_temp=True)
# Verificar dependencias
if sympy_en_expr is None or sympy_or_const_in is None or target_scl_name is None:
return False
# Convertir la entrada (SymPy o Constante) a SCL
# La simplificación aquí no suele aplicar a la conversión en sí,
# pero sí podría aplicar a la condición EN.
input_scl = sympy_expr_to_scl(sympy_or_const_in, symbol_manager)
# Determinar el tipo de destino (esto sigue siendo un desafío sin info completa)
# Usaremos funciones de conversión SCL explícitas si podemos inferirlas.
target_type_hint = instruction.get("template_values", {}).get("destType", "").upper() # Ejemplo
source_type_hint = "" # Necesitaríamos info del tipo de origen
conversion_func_name = None
# Heurística MUY básica (necesita mejorar con info de tipos real)
if target_type_hint and source_type_hint and target_type_hint != source_type_hint:
conversion_func_name = f"{source_type_hint}_TO_{target_type_hint}"
# Generar SCL Core
if conversion_func_name:
# Usar función explícita si la inferimos
scl_core = f"{target_scl_name} := {conversion_func_name}({input_scl});"
else:
# Asignación directa (MOVE implícito) si no hay conversión clara
# ADVERTENCIA: Esto puede causar errores de tipo en el PLC si los tipos no coinciden.
scl_core = f"{target_scl_name} := {input_scl};"
if target_type_hint: # Añadir comentario si al menos conocemos el destino
scl_core += f" // TODO: Verify implicit conversion to {target_type_hint}"
# Aplicar Condición EN (Simplificando EN)
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for Convert {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# Actualizar instrucción y mapa
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar valor de salida (el contenido del destino) y ENO
map_key_out = (network_id, instr_uid, "out")
# Guardar el *nombre* SCL del destino en el mapa, ya que contiene el valor
# O podríamos crear un símbolo SymPy para ello si fuera necesario aguas abajo? Por ahora, string.
sympy_map[map_key_out] = target_scl_name
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr # Guardar la expresión SymPy para ENO
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el procesador Convert."""
return {'type_name': 'convert', 'processor_func': process_convert, 'priority': 4}

View File

@ -1,110 +0,0 @@
# processors/process_counter.py
# -*- coding: utf-8 -*-
import sympy
import traceback
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name, get_target_scl_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_counter(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL para Contadores (CTU, CTD, CTUD).
Requiere datos de instancia (DB o STAT).
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "") # CTU, CTD, CTUD
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# 1. Definir pines de entrada esperados
input_pins_map = {
"CTU": ["CU", "R", "PV"],
"CTD": ["CD", "LD", "PV"],
"CTUD": ["CU", "CD", "R", "LD", "PV"]
}
input_pins = input_pins_map.get(instr_type_original.upper())
if not input_pins:
instruction["scl"] = f"// ERROR: Tipo de contador no soportado: {instr_type_original}"
instruction["type"] = instr_type_original + "_error"
return True
# 2. Procesar Parámetros de Entrada
scl_call_params = []
dependencies_resolved = True
optional_pins = {"R", "LD"} # Estos pueden no estar conectados
for pin in input_pins:
pin_info = instruction["inputs"].get(pin)
if pin_info: # Si el pin está definido en el JSON
source_sympy_or_const = get_sympy_representation(pin_info, network_id, sympy_map, symbol_manager)
if source_sympy_or_const is None:
# print(f"DEBUG Counter {instr_uid}: Input param '{pin}' dependency not ready.")
dependencies_resolved = False
break
# Convertir a SCL para la llamada (sin simplificar aquí)
param_scl_value = sympy_expr_to_scl(source_sympy_or_const, symbol_manager)
pin_name_scl = format_variable_name(pin) # Formatear nombre del parámetro
scl_call_params.append(f"{pin_name_scl} := {param_scl_value}")
elif pin not in optional_pins: # Si falta un pin requerido
print(f"Error: Falta entrada requerida '{pin}' para {instr_type_original} UID {instr_uid}.")
instruction["scl"] = f"// ERROR: Falta entrada requerida '{pin}' para {instr_type_original} UID {instr_uid}."
instruction["type"] = instr_type_original + "_error"
return True
if not dependencies_resolved:
return False
# 3. Obtener Nombre de Instancia
# Asumiendo que x1 o una fase previa llena 'instance_db' si es un FB multi-instancia
instance_name_raw = instruction.get("instance_db")
if not instance_name_raw:
# Asumiendo que es STAT si no hay DB instancia explícito (requiere declaración en x3)
instance_name_raw = instruction.get("instance_name") # Buscar nombre directo si x1 lo provee
if not instance_name_raw:
instance_name_raw = f"#CTR_INSTANCE_{instr_uid}" # Placeholder final
print(f"Advertencia: No se encontró nombre/instancia para {instr_type_original} UID {instr_uid}. Usando placeholder '{instance_name_raw}'.")
instance_name_scl = format_variable_name(instance_name_raw)
# 4. Generar la llamada SCL
param_string = ", ".join(scl_call_params)
scl_call = f"{instance_name_scl}({param_string}); // TODO: Declarar {instance_name_scl} : {instr_type_original.upper()}; en VAR_STAT o VAR"
# Contadores no suelen tener EN/ENO explícito en LAD, se asume siempre habilitado
instruction["scl"] = scl_call # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
    # 5. Actualizar sympy_map para las salidas (QU, QD, CV)
output_pins_map = {
"CTU": ["QU", "CV"],
"CTD": ["QD", "CV"],
"CTUD": ["QU", "QD", "CV"]
}
output_pins = output_pins_map.get(instr_type_original.upper(), [])
for pin in output_pins:
map_key = (network_id, instr_uid, pin)
output_scl_access = f"{instance_name_scl}.{pin.upper()}"
if pin.upper() in ["QU", "QD"]: # These are boolean outputs
# *** Store SymPy Symbol for boolean outputs QU/QD ***
sympy_out_symbol = symbol_manager.get_symbol(output_scl_access)
if sympy_out_symbol:
sympy_map[map_key] = sympy_out_symbol # Store SYMBOL
else:
print(f"Error: Could not create symbol for {output_scl_access} in {instr_type_original} {instr_uid}")
sympy_map[map_key] = None
else:
# For non-boolean (like CV - count value), store SCL access string
sympy_map[map_key] = output_scl_access
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para los contadores CTU, CTD, CTUD."""
return [
{'type_name': 'ctu', 'processor_func': process_counter, 'priority': 5},
{'type_name': 'ctd', 'processor_func': process_counter, 'priority': 5},
{'type_name': 'ctud', 'processor_func': process_counter, 'priority': 5}
]
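# Ejemplo ilustrativo con nombres hipotéticos: un CTU con PV = 10, R conectado y DB de
# instancia IEC_Counter_DB generaría aproximadamente:
#   IEC_Counter_DB(CU := <pulso>, R := <reset>, PV := 10); // TODO: Declarar ...
# y en sympy_map quedarían IEC_Counter_DB.QU como símbolo booleano y IEC_Counter_DB.CV
# como cadena de acceso SCL para el valor de cuenta.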

View File

@ -1,85 +0,0 @@
# processors/process_edge_detector.py
# -*- coding: utf-8 -*-
import sympy
import traceback
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"
def process_edge_detector(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera la expresión SymPy para el pulso de PBox (P_TRIG) o NBox (N_TRIG).
Guarda la expresión SymPy del pulso en sympy_map['out'].
Genera y guarda el SCL para la actualización de memoria en '_edge_mem_update_scl'.
El campo 'scl' principal se deja casi vacío/comentario.
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "") # PBox o NBox
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# 1. Obtener CLK (como SymPy expr) y MemBit (como SymPy Symbol)
clk_input = instruction["inputs"].get("in")
mem_bit_input = instruction["inputs"].get("bit")
sympy_clk_expr = get_sympy_representation(clk_input, network_id, sympy_map, symbol_manager)
mem_bit_plc_name = extract_plc_variable_name(mem_bit_input)
sympy_mem_bit_symbol = symbol_manager.get_symbol(mem_bit_plc_name) if mem_bit_plc_name else None
# 2. Verificar dependencias
if sympy_clk_expr is None: return False
if sympy_mem_bit_symbol is None:
err_msg = f"MemBit no resuelto o no es variable para {instr_type_original} UID {instr_uid}"
print(f"Error: {err_msg}")
instruction["scl"] = f"// ERROR: {err_msg}"
instruction["type"] = instr_type_original + "_error"
return True
# 3. Generar Lógica SymPy del *pulso*
result_pulse_sympy_expr = sympy.false # Default
scl_comment_prefix = ""
if instr_type_original.upper() == "PBOX": # P_TRIG
result_pulse_sympy_expr = sympy.And(sympy_clk_expr, sympy.Not(sympy_mem_bit_symbol))
scl_comment_prefix = "P_TRIG"
elif instr_type_original.upper() == "NBOX": # N_TRIG
result_pulse_sympy_expr = sympy.And(sympy.Not(sympy_clk_expr), sympy_mem_bit_symbol)
scl_comment_prefix = "N_TRIG"
else: # Error
instruction["scl"] = f"// ERROR: Tipo de flanco inesperado {instr_type_original}"
instruction["type"] = instr_type_original + "_error"
return True
# 4. Generar el SCL para la actualización del bit de memoria
# Necesitamos la representación SCL de la entrada CLK
clk_scl_str = sympy_expr_to_scl(sympy_clk_expr, symbol_manager)
# Usamos el nombre PLC original formateado para el bit de memoria
mem_bit_scl_name = format_variable_name(mem_bit_plc_name)
scl_mem_update = f"{mem_bit_scl_name} := {clk_scl_str};"
scl_comment_for_update = f"// {scl_comment_prefix}({clk_scl_str}) - Mem: {mem_bit_scl_name}"
# 5. Almacenar Resultados
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = result_pulse_sympy_expr # Guardar EXPRESIÓN SymPy del pulso
# Guardar SCL de actualización + Comentario en campo temporal
instruction['_edge_mem_update_scl'] = f"{scl_mem_update} {scl_comment_for_update}"
# Marcar como procesado, SCL principal es solo comentario
instruction['scl'] = f"// {instr_type_original} SymPy processed, logic in consumer"
instruction["type"] = instr_type_original + SCL_SUFFIX
# 6. Propagar ENO (es la expresión SymPy de CLK)
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_clk_expr
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la info para los detectores de flanco PBox y NBox."""
return [
{'type_name': 'pbox', 'processor_func': process_edge_detector, 'priority': 2},
{'type_name': 'nbox', 'processor_func': process_edge_detector, 'priority': 2}
]
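
For reference, a minimal standalone sketch (not part of the repository) of the pulse logic this processor builds; the symbols clk and mem are hypothetical stand-ins for the resolved CLK expression and the memory bit:

import sympy

clk, mem = sympy.symbols("clk mem")        # hypothetical CLK expression and MemBit
p_trig = sympy.And(clk, sympy.Not(mem))    # PBox pulse: CLK AND NOT MemBit
n_trig = sympy.And(sympy.Not(clk), mem)    # NBox pulse: NOT CLK AND MemBit
mem_update_scl = "mem := clk;"             # SCL stored in '_edge_mem_update_scl'
print("P_TRIG pulse:", p_trig)             # clk & ~mem
print("N_TRIG pulse:", n_trig)             # ~clk & mem
print("memory update:", mem_update_scl)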

View File

@ -1,64 +0,0 @@
# processors/process_eq.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades de SymPy
from .processor_utils import get_sympy_representation, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed" # Nuevo sufijo
def process_eq(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera la expresión SymPy para el comparador de igualdad (EQ).
El resultado se propaga por sympy_map['out'].
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Eq")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener operandos como expresiones SymPy o constantes/strings
in1_info = instruction["inputs"].get("in1")
in2_info = instruction["inputs"].get("in2")
op1_sympy = get_sympy_representation(in1_info, network_id, sympy_map, symbol_manager)
op2_sympy = get_sympy_representation(in2_info, network_id, sympy_map, symbol_manager)
# Obtener 'pre' (RLO anterior) como expresión SymPy
pre_input = instruction["inputs"].get("pre") # Asumir 'pre' como entrada RLO estándar
sympy_pre_rlo = get_sympy_representation(pre_input, network_id, sympy_map, symbol_manager) if pre_input else sympy.true
# Verificar dependencias
if op1_sympy is None or op2_sympy is None or sympy_pre_rlo is None:
# print(f"DEBUG EQ {instr_uid}: Dependency not ready")
return False
# Crear la expresión de igualdad SymPy
try:
# sympify puede ser necesario si los operandos son strings de constantes
op1_eval = sympy.sympify(op1_sympy) if isinstance(op1_sympy, str) else op1_sympy
op2_eval = sympy.sympify(op2_sympy) if isinstance(op2_sympy, str) else op2_sympy
comparison_expr = sympy.Eq(op1_eval, op2_eval) # Eq para igualdad
except (SyntaxError, TypeError, ValueError) as e:
print(f"Error creating SymPy equality for {instr_uid}: {e}")
instruction["scl"] = f"// ERROR creando expr SymPy EQ {instr_uid}: {e}"
instruction["type"] = instr_type_original + "_error"
return True
# Guardar resultado (Expresión SymPy booleana) en el mapa para 'out'
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = comparison_expr
# Guardar el RLO de entrada ('pre') como ENO en el mapa SymPy
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_pre_rlo
# Marcar como procesado, SCL principal es solo comentario
instruction["scl"] = f"// SymPy EQ: {comparison_expr}" # Comentario opcional
instruction["type"] = instr_type_original + SCL_SUFFIX
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el comparador de igualdad (EQ)."""
return {'type_name': 'eq', 'processor_func': process_eq, 'priority': 2}
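
A small illustrative sketch of the comparison expression, assuming one operand is a mapped variable symbol and the other arrives as a constant string (names invented):

import sympy

op1 = sympy.Symbol("v0_")        # stand-in for a variable already in the symbol map
op2 = sympy.sympify("5")         # numeric constants are sympify'd from their string value
print(sympy.Eq(op1, op2))        # Eq(v0_, 5) -- propagated through sympy_map['out']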

View File

@ -1,90 +0,0 @@
# processors/process_math.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re  # used below to decide when to parenthesize operands
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_math(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL para operaciones matemáticas (SUB, MUL, DIV), simplificando EN.
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "") # SUB, MUL, DIV
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Mapa de tipos a operadores SCL string
op_map = {"SUB": "-", "MUL": "*", "DIV": "/"}
scl_operator = op_map.get(instr_type_original.upper())
if not scl_operator:
instruction["scl"] = f"// ERROR: Operación matemática no soportada: {instr_type_original}"
instruction["type"] = instr_type_original + "_error"
return True
# Obtener EN (SymPy), IN1, IN2 (SymPy o Constante/String)
en_input = instruction["inputs"].get("en")
in1_info = instruction["inputs"].get("in1")
in2_info = instruction["inputs"].get("in2")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
op1_sympy_or_const = get_sympy_representation(in1_info, network_id, sympy_map, symbol_manager)
op2_sympy_or_const = get_sympy_representation(in2_info, network_id, sympy_map, symbol_manager)
# Obtener destino SCL
target_scl_name = get_target_scl_name(instruction, "out", network_id, default_to_temp=True)
# Verificar dependencias
if sympy_en_expr is None or op1_sympy_or_const is None or op2_sympy_or_const is None or target_scl_name is None:
return False
# Convertir operandos SymPy/Constante a SCL strings
op1_scl = sympy_expr_to_scl(op1_sympy_or_const, symbol_manager)
op2_scl = sympy_expr_to_scl(op2_sympy_or_const, symbol_manager)
# Añadir paréntesis si contienen operadores (más seguro)
# La función sympy_expr_to_scl debería idealmente manejar esto, pero doble chequeo simple:
op1_scl_formatted = f"({op1_scl})" if re.search(r'[+\-*/ ]', op1_scl) else op1_scl
op2_scl_formatted = f"({op2_scl})" if re.search(r'[+\-*/ ]', op2_scl) else op2_scl
# Generar SCL Core
scl_core = f"{target_scl_name} := {op1_scl_formatted} {scl_operator} {op2_scl_formatted};"
# Aplicar Condición EN (Simplificando EN)
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for {instr_type_original} {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# Actualizar instrucción y mapa
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar valor de salida (nombre SCL del destino) y ENO (expresión SymPy)
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = target_scl_name # Guardar nombre del destino
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr # Guardar la expresión SymPy para ENO
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve info para SUB, MUL, DIV."""
return [
{'type_name': 'sub', 'processor_func': process_math, 'priority': 4},
{'type_name': 'mul', 'processor_func': process_math, 'priority': 4},
{'type_name': 'div', 'processor_func': process_math, 'priority': 4}
]
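
As an illustration of the EN handling shared by the math, MOD and MOVE processors, a hedged sketch (variable names invented; the real code converts the condition via sympy_expr_to_scl) of the simplify-then-wrap step:

import sympy

a, b, c = sympy.symbols("a b c")
en = sympy.Or(sympy.And(a, b), sympy.And(a, b, c))    # EN as assembled from the network
en = sympy.logic.boolalg.to_dnf(en, simplify=True)    # absorption reduces it to a & b
core = "out := in1 - in2;"                            # core assignment for a SUB box
print(f"IF {en} THEN\n  {core}\nEND_IF;")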

View File

@ -1,75 +0,0 @@
# processors/process_mod.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re # Importar re si no estaba
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_mod(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera SCL para Modulo (MOD), simplificando EN."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Mod")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener EN (SymPy), IN1, IN2 (SymPy o Constante/String)
en_input = instruction["inputs"].get("en")
in1_info = instruction["inputs"].get("in1")
in2_info = instruction["inputs"].get("in2")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
op1_sympy_or_const = get_sympy_representation(in1_info, network_id, sympy_map, symbol_manager)
op2_sympy_or_const = get_sympy_representation(in2_info, network_id, sympy_map, symbol_manager)
# Obtener destino SCL
target_scl_name = get_target_scl_name(instruction, "out", network_id, default_to_temp=True)
# Verificar dependencias
if sympy_en_expr is None or op1_sympy_or_const is None or op2_sympy_or_const is None or target_scl_name is None:
return False
# Convertir operandos SymPy/Constante a SCL strings
op1_scl = sympy_expr_to_scl(op1_sympy_or_const, symbol_manager)
op2_scl = sympy_expr_to_scl(op2_sympy_or_const, symbol_manager)
# Añadir paréntesis si contienen operadores
op1_scl_formatted = f"({op1_scl})" if re.search(r'[+\-*/ ]', op1_scl) else op1_scl
op2_scl_formatted = f"({op2_scl})" if re.search(r'[+\-*/ ]', op2_scl) else op2_scl
# Generar SCL Core
scl_core = f"{target_scl_name} := {op1_scl_formatted} MOD {op2_scl_formatted};"
# Aplicar Condición EN (Simplificando EN)
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for {instr_type_original} {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# Actualizar instrucción y mapa
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar valor de salida (nombre SCL del destino) y ENO (expresión SymPy)
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = target_scl_name # Guardar nombre del destino
map_key_eno = (network_id, instr_uid, "eno")
sympy_map[map_key_eno] = sympy_en_expr # Guardar la expresión SymPy para ENO
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la operación Modulo."""
return {'type_name': 'mod', 'processor_func': process_mod, 'priority': 4}

View File

@ -1,75 +0,0 @@
# processors/process_move.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re # Importar re
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_move(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera SCL para Move, simplificando la condición EN."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Move")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener EN (SymPy) e IN (SymPy o Constante/String)
en_input = instruction["inputs"].get("en")
in_info = instruction["inputs"].get("in")
sympy_en_expr = get_sympy_representation(en_input, network_id, sympy_map, symbol_manager) if en_input else sympy.true
input_sympy_or_const = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
# Obtener destino SCL (requiere destino explícito para MOVE)
target_scl_name = get_target_scl_name(instruction, "out1", network_id, default_to_temp=False)
if target_scl_name is None:
target_scl_name = get_target_scl_name(instruction, "out", network_id, default_to_temp=False)
# Verificar dependencias
if sympy_en_expr is None or input_sympy_or_const is None:
return False
if target_scl_name is None:
print(f"Error: MOVE {instr_uid} sin destino claro en 'out' o 'out1'.")
instruction["scl"] = f"// ERROR: MOVE {instr_uid} sin destino claro."
instruction["type"] = instr_type_original + "_error"
return True # Procesado con error
# Convertir la entrada (SymPy o Constante) a SCL string
input_scl = sympy_expr_to_scl(input_sympy_or_const, symbol_manager)
# Generar SCL Core
scl_core = f"{target_scl_name} := {input_scl};"
# Aplicar Condición EN (Simplificando EN)
scl_final = ""
if sympy_en_expr != sympy.true:
try:
#simplified_en_expr = sympy.simplify_logic(sympy_en_expr, force=True)
simplified_en_expr = sympy.logic.boolalg.to_dnf(sympy_en_expr, simplify=True)
except Exception as e:
print(f"Error simplifying EN for {instr_type_original} {instr_uid}: {e}")
simplified_en_expr = sympy_en_expr # Fallback
en_condition_scl = sympy_expr_to_scl(simplified_en_expr, symbol_manager)
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {en_condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
scl_final = scl_core
# Actualizar instrucción y mapa
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# Propagar valor de salida (nombre SCL del destino) y ENO (expresión SymPy)
# Asumiendo que out y out1 deben propagar el mismo valor
sympy_map[(network_id, instr_uid, "out")] = target_scl_name
sympy_map[(network_id, instr_uid, "out1")] = target_scl_name
sympy_map[(network_id, instr_uid, "eno")] = sympy_en_expr
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la operación Move."""
return {'type_name': 'move', 'processor_func': process_move, 'priority': 3}

View File

@ -1,54 +0,0 @@
# processors/process_not.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed" # Nuevo sufijo
def process_not(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera la expresión SymPy para la inversión lógica NOT."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "Not")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener entrada como expresión SymPy
in_info = instruction["inputs"].get("in")
sympy_expr_in = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
# Verificar dependencias
if sympy_expr_in is None:
# print(f"DEBUG Not {instr_uid}: Dependency not ready")
return False
# Crear la expresión NOT de SymPy
try:
not_expr = sympy.Not(sympy_expr_in)
# ¿Simplificar aquí? NOT(NOT A) -> A; NOT(TRUE) -> FALSE, etc.
# simplify_logic podría hacer esto, pero puede ser costoso en cada paso.
# SymPy podría manejar simplificaciones básicas automáticamente.
# Opcional: not_expr = sympy.simplify_logic(not_expr)
except Exception as e:
print(f"Error creating SymPy Not for {instr_uid}: {e}")
instruction["scl"] = f"// ERROR creando expr SymPy NOT {instr_uid}: {e}"
instruction["type"] = instr_type_original + "_error"
return True
# Guardar resultado (Expresión SymPy) en el mapa para 'out'
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = not_expr
# Marcar como procesado, SCL principal es solo comentario
instruction["scl"] = f"// SymPy NOT: {not_expr}" # Comentario opcional
instruction["type"] = instr_type_original + SCL_SUFFIX
# NOT no tiene EN/ENO explícito en LAD, modifica el RLO ('out')
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la operación Not."""
return {'type_name': 'not', 'processor_func': process_not, 'priority': 1}
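
A quick sketch of the simplifications SymPy already applies when NOT expressions are built, which is why no explicit simplify call is needed at this step:

import sympy

a = sympy.Symbol("a")
print(sympy.Not(sympy.Not(a)))   # a      -- double negation folds automatically
print(sympy.Not(sympy.true))     # False  -- constant inputs collapse immediately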

View File

@ -1,69 +0,0 @@
# processors/process_o.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed" # Nuevo sufijo
def process_o(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera la expresión SymPy para la operación lógica O (OR)."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "O")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Buscar todas las entradas 'in', 'in1', 'in2', ...
input_pins = sorted([pin for pin in instruction.get("inputs", {}) if pin.startswith("in")])
if not input_pins:
print(f"Error: O {instr_uid} sin pines de entrada (inX).")
instruction["scl"] = f"// ERROR: O {instr_uid} sin pines inX"
instruction["type"] = instr_type_original + "_error"
return True
sympy_parts = []
all_resolved = True
for pin in input_pins:
input_info = instruction["inputs"][pin]
sympy_expr = get_sympy_representation(input_info, network_id, sympy_map, symbol_manager)
if sympy_expr is None:
all_resolved = False
# print(f"DEBUG: O {instr_uid} esperando pin {pin}")
break # Salir si una dependencia no está lista
# Optimización: No incluir FALSE en un OR
if sympy_expr != sympy.false:
sympy_parts.append(sympy_expr)
if not all_resolved:
return False # Esperar dependencias
# Construir la expresión OR de SymPy
result_sympy_expr = sympy.false # Valor por defecto si no hay entradas válidas o todas son FALSE
if sympy_parts:
# Usar sympy.Or para construir la expresión
result_sympy_expr = sympy.Or(*sympy_parts)
# Simplificar casos obvios como OR(X) -> X, OR(X, TRUE) -> TRUE
# simplify_logic aquí puede ser prematuro, mejor al final.
# Pero Or() podría simplificar automáticamente OR(X) -> X.
# Opcional: result_sympy_expr = sympy.simplify_logic(result_sympy_expr)
# Guardar la expresión SymPy resultante en el mapa para 'out'
map_key_out = (network_id, instr_uid, "out")
sympy_map[map_key_out] = result_sympy_expr
# Marcar como procesado, SCL principal es solo comentario
instruction["scl"] = f"// SymPy O: {result_sympy_expr}" # Comentario opcional
instruction["type"] = instr_type_original + SCL_SUFFIX
# La instrucción 'O' no tiene ENO propio, propaga el resultado por 'out'
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la operación lógica O (OR)."""
return {'type_name': 'o', 'processor_func': process_o, 'priority': 1}
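
A short sketch of why constant FALSE inputs can be skipped when building the OR: sympy.Or already collapses the trivial cases on its own:

import sympy

a, b = sympy.symbols("a b")
print(sympy.Or(a, sympy.false))   # a     -- FALSE branches drop out
print(sympy.Or(a, b, sympy.true)) # True  -- a constant TRUE dominates
print(sympy.Or(a))                # a     -- single-branch OR collapses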

View File

@ -1,74 +0,0 @@
# processors/process_rcoil.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_rcoil(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data ):
"""Genera SCL para Reset Coil (RCoil), simplificando la condición."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "RCoil")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener condición de entrada (SymPy expr)
in_info = instruction["inputs"].get("in")
sympy_expr_in = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
# Obtener operando (nombre SCL del destino)
target_scl_name = get_target_scl_name(instruction, "operand", network_id, default_to_temp=False) # RCoil necesita destino explícito
# Verificar dependencias
if sympy_expr_in is None: return False
if target_scl_name is None:
print(f"Error: RCoil {instr_uid} operando no es variable o falta info.")
instruction["scl"] = f"// ERROR: RCoil {instr_uid} operando no es variable."
instruction["type"] = instr_type_original + "_error"
return True
# No hacer nada si la condición es FALSE constante
if sympy_expr_in == sympy.false:
instruction["scl"] = f"// RCoil {instr_uid} con condición FALSE constante, optimizado."
instruction["type"] = instr_type_original + SCL_SUFFIX
return True
# Generar SCL Core (Reset)
scl_core = f"{target_scl_name} := FALSE;"
# Aplicar Condición IF si no es TRUE constante
scl_final = ""
if sympy_expr_in != sympy.true:
# Simplificar la condición ANTES de convertirla a SCL
try:
#simplified_expr = sympy.simplify_logic(sympy_expr_in, force=True)
simplified_expr = sympy.logic.boolalg.to_dnf(sympy_expr_in, simplify=True)
except Exception as e:
print(f"Error simplifying condition for RCoil {instr_uid}: {e}")
simplified_expr = sympy_expr_in # Fallback
condition_scl = sympy_expr_to_scl(simplified_expr, symbol_manager)
# Evitar IF TRUE THEN...
if condition_scl == "TRUE":
scl_final = scl_core
else:
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
# Condición es TRUE constante
scl_final = scl_core
# Actualizar instrucción
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# RCoil no tiene salida lógica para propagar en sympy_map
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la bobina Reset (RCoil)."""
return {'type_name': 'rcoil', 'processor_func': process_rcoil, 'priority': 3}

View File

@ -1,74 +0,0 @@
# processors/process_scoil.py
# -*- coding: utf-8 -*-
import sympy
import traceback
import re
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, get_target_scl_name, format_variable_name
from .symbol_manager import SymbolManager
SCL_SUFFIX = "_sympy_processed"
def process_scoil(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""Genera SCL para Set Coil (SCoil), simplificando la condición."""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "SCoil")
if instr_type_original.endswith(SCL_SUFFIX) or "_error" in instr_type_original:
return False
# Obtener condición de entrada (SymPy expr)
in_info = instruction["inputs"].get("in")
sympy_expr_in = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
# Obtener operando (nombre SCL del destino)
target_scl_name = get_target_scl_name(instruction, "operand", network_id, default_to_temp=False) # SCoil necesita destino
# Verificar dependencias
if sympy_expr_in is None: return False
if target_scl_name is None:
print(f"Error: SCoil {instr_uid} operando no es variable o falta info.")
instruction["scl"] = f"// ERROR: SCoil {instr_uid} operando no es variable."
instruction["type"] = instr_type_original + "_error"
return True
# No hacer nada si la condición es FALSE constante
if sympy_expr_in == sympy.false:
instruction["scl"] = f"// SCoil {instr_uid} con condición FALSE constante, optimizado."
instruction["type"] = instr_type_original + SCL_SUFFIX
return True
# Generar SCL Core (Set)
scl_core = f"{target_scl_name} := TRUE;"
# Aplicar Condición IF si no es TRUE constante
scl_final = ""
if sympy_expr_in != sympy.true:
# Simplificar la condición ANTES de convertirla a SCL
try:
#simplified_expr = sympy.simplify_logic(sympy_expr_in, force=True)
simplified_expr = sympy.logic.boolalg.to_dnf(sympy_expr_in, simplify=True)
except Exception as e:
print(f"Error simplifying condition for SCoil {instr_uid}: {e}")
simplified_expr = sympy_expr_in # Fallback
condition_scl = sympy_expr_to_scl(simplified_expr, symbol_manager)
# Evitar IF TRUE THEN...
if condition_scl == "TRUE":
scl_final = scl_core
else:
indented_core = "\n".join([f" {line}" for line in scl_core.splitlines()])
scl_final = f"IF {condition_scl} THEN\n{indented_core}\nEND_IF;"
else:
# Condición es TRUE constante
scl_final = scl_core
# Actualizar instrucción
instruction["scl"] = scl_final # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# SCoil no tiene salida lógica para propagar en sympy_map
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para la bobina Set (SCoil)."""
return {'type_name': 'scoil', 'processor_func': process_scoil, 'priority': 3}
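
A hedged sketch (all names invented; the condition would go through sympy_expr_to_scl in the real pipeline) of the SCL both coil processors emit once the condition is simplified; SCoil writes TRUE, RCoil writes FALSE:

import sympy

cond = sympy.Or(sympy.Symbol("start"), sympy.Symbol("jog"))   # resolved 'in' condition
cond = sympy.logic.boolalg.to_dnf(cond, simplify=True)
target = '"Motor".Run'                                        # formatted operand name
print(f"IF {cond} THEN\n  {target} := TRUE;\nEND_IF;")        # SCoil
print(f"IF {cond} THEN\n  {target} := FALSE;\nEND_IF;")       # RCoil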

View File

@ -1,77 +0,0 @@
# processors/process_sd.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name, get_target_scl_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"
def process_sd(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL para Temporizador On-Delay (Sd -> TON).
Requiere datos de instancia (DB o STAT/TEMP).
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = "Sd" # Tipo original LAD
if instruction.get("type","").endswith(SCL_SUFFIX) or "_error" in instruction.get("type",""):
return False
# 1. Obtener Inputs: s (start), tv (time value), timer (instance)
s_info = instruction["inputs"].get("s")
tv_info = instruction["inputs"].get("tv")
timer_instance_info = instruction["inputs"].get("timer")
sympy_s_expr = get_sympy_representation(s_info, network_id, sympy_map, symbol_manager)
# tv suele ser constante, pero lo obtenemos igual
sympy_or_const_tv = get_sympy_representation(tv_info, network_id, sympy_map, symbol_manager)
# Obtener el nombre de la INSTANCIA (no su valor)
instance_plc_name = extract_plc_variable_name(timer_instance_info)
# Verificar dependencias
if sympy_s_expr is None or sympy_or_const_tv is None: return False
if instance_plc_name is None:
print(f"Error: Sd {instr_uid} sin variable de instancia 'timer'.")
instance_plc_name = f"#TON_INSTANCE_{instr_uid}" # Placeholder con error implícito
print(f"Advertencia: Usando placeholder '{instance_plc_name}'. ¡Declarar en SCL!")
# Podríamos marcar como error, pero intentamos generar algo
# instruction["type"] = instr_type_original + "_error"
# return True
# Formatear nombre de instancia
instance_name_scl = format_variable_name(instance_plc_name)
# Convertir entradas SymPy/Constante a SCL strings
s_scl = sympy_expr_to_scl(sympy_s_expr, symbol_manager)
tv_scl = sympy_expr_to_scl(sympy_or_const_tv, symbol_manager)
# Generar la llamada SCL (TON usa IN, PT)
# Ignoramos 'r' (reset) de Sd
scl_call = f"{instance_name_scl}(IN := {s_scl}, PT := {tv_scl}); // TODO: Declarar {instance_name_scl} : TON;"
# Actualizar instrucción
instruction["scl"] = scl_call # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# 7. Actualizar sympy_map para las salidas Q y RT
map_key_q = (network_id, instr_uid, "q")
q_output_scl_access = f"{instance_name_scl}.Q" # SCL string to access output
# *** GET/CREATE AND STORE SYMBOL for boolean output Q ***
sympy_q_symbol = symbol_manager.get_symbol(q_output_scl_access)
if sympy_q_symbol:
sympy_map[map_key_q] = sympy_q_symbol # STORE THE SYMBOL OBJECT
else:
print(f"Error: Could not create symbol for {q_output_scl_access} in Sd {instr_uid}")
sympy_map[map_key_q] = None # Indicate error/unresolved
map_key_rt = (network_id, instr_uid, "rt")
# ET is TIME, store SCL access string
sympy_map[map_key_rt] = f"{instance_name_scl}.ET"
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la información para el temporizador On-Delay (Sd -> TON)."""
return {'type_name': 'sd', 'processor_func': process_sd, 'priority': 5}

View File

@ -1,112 +0,0 @@
# processors/process_se.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name, get_target_scl_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"
def process_se(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL para Temporizador de Pulso (Se -> TP) o SdCoil (-> TON).
Usa SymPy para entradas y almacena Symbol para salida Q.
"""
instr_uid = instruction["instruction_uid"]
# Obtener tipo original (antes de añadir sufijo) para determinar comportamiento
instr_type_original = instruction.get("type", "").replace(SCL_SUFFIX,"").replace("_error","") # Se o SdCoil
current_type = instruction.get("type","") # Tipo actual para chequeo inicial
if current_type.endswith(SCL_SUFFIX) or "_error" in current_type:
return False
# Determinar el tipo de instrucción SCL y pines de entrada/salida correctos
scl_timer_type = "TP"
pin_in = "s" # Pin de entrada para Se
pin_time = "tv" # Pin de valor de tiempo para Se
pin_instance = "timer" # Pin donde se conecta la instancia para Se
pin_out_q = "q" # Pin de salida Q para Se
pin_out_time = "rt" # Pin de tiempo restante para Se -> TP.ET
# Ajustar pines si el tipo original era SdCoil
if instr_type_original == "SdCoil":
scl_timer_type = "TON" # SdCoil es funcionalmente un TON
pin_in = "in" # SdCoil usa 'in'
pin_time = "value" # SdCoil usa 'value'
pin_instance = "operand" # SdCoil usa 'operand' como instancia/variable de salida
pin_out_q = "out" # SdCoil usa 'out' como pin de salida Q
pin_out_time = None # SdCoil no tiene salida ET explícita
# 1. Obtener Inputs usando los nombres de pin correctos
s_info = instruction["inputs"].get(pin_in)
tv_info = instruction["inputs"].get(pin_time)
timer_instance_info = instruction["inputs"].get(pin_instance)
# Obtener representaciones (SymPy o Constante/String)
sympy_s_expr = get_sympy_representation(s_info, network_id, sympy_map, symbol_manager)
sympy_or_const_tv = get_sympy_representation(tv_info, network_id, sympy_map, symbol_manager)
# Obtener el nombre PLC original de la INSTANCIA
instance_plc_name = extract_plc_variable_name(timer_instance_info)
# 2. Verificar dependencias
if sympy_s_expr is None or sympy_or_const_tv is None:
# print(f"DEBUG {instr_type_original} {instr_uid}: Input/TV dependency not ready")
return False
if instance_plc_name is None:
print(f"Error: {instr_type_original} {instr_uid} sin variable de instancia en pin '{pin_instance}'.")
instance_plc_name = f"#{scl_timer_type}_INSTANCE_{instr_uid}" # Placeholder
print(f"Advertencia: Usando placeholder '{instance_plc_name}'. ¡Declarar en SCL!")
# 3. Formatear nombre de instancia para SCL
instance_name_scl = format_variable_name(instance_plc_name)
# 4. Convertir entradas SymPy/Constante a SCL strings (simplificando la entrada IN)
try:
# Simplify the boolean start expression before converting it to SCL
#simplified_s_expr = sympy.simplify_logic(sympy_s_expr, force=True)
simplified_s_expr = sympy.logic.boolalg.to_dnf(sympy_s_expr, simplify=True)
except Exception as e:
print(f"Error simplifying '{pin_in}' input for {instr_type_original} {instr_uid}: {e}")
simplified_s_expr = sympy_s_expr # Fallback
s_scl = sympy_expr_to_scl(simplified_s_expr, symbol_manager)
# tv normalmente es constante, sympy_expr_to_scl debería manejarlo
tv_scl = sympy_expr_to_scl(sympy_or_const_tv, symbol_manager)
# 5. Generar la llamada SCL
# Ignoramos 'r' (reset) de Se si existiera
scl_call = f"{instance_name_scl}(IN := {s_scl}, PT := {tv_scl}); // TODO: Declarar {instance_name_scl} : {scl_timer_type};"
# 6. Actualizar instrucción con el SCL final
instruction["scl"] = scl_call
instruction["type"] = instr_type_original + SCL_SUFFIX # Marcar como procesado
# 7. Actualizar sympy_map para las salidas (Q y ET si aplica)
# Usar los nombres de pin originales determinados al principio
map_key_q = (network_id, instr_uid, pin_out_q) # pin_out_q es 'q' o 'out'
q_output_scl_access = f"{instance_name_scl}.Q" # Siempre accedemos a .Q del FB SCL
# *** OBTENER/CREAR Y ALMACENAR SYMBOL para la salida booleana Q ***
sympy_q_symbol = symbol_manager.get_symbol(q_output_scl_access)
if sympy_q_symbol:
sympy_map[map_key_q] = sympy_q_symbol # Almacenar el OBJETO SYMBOL
else:
# Manejar error si no se pudo crear el símbolo
print(f"Error: No se pudo crear símbolo para {q_output_scl_access} en {instr_type_original} {instr_uid}")
sympy_map[map_key_q] = None # Indicar error/irresoluble
# Almacenar ET solo si corresponde (para Se, no para SdCoil)
if pin_out_time: # pin_out_time es 'rt' o None
map_key_rt = (network_id, instr_uid, pin_out_time)
# ET es TIME, no booleano. Almacenar el string SCL de acceso está bien.
sympy_map[map_key_rt] = f"{instance_name_scl}.ET" # Salida ET del FB SCL
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve la info para Se (-> TP) y SdCoil (-> TON, manejado aquí)."""
return [
{'type_name': 'se', 'processor_func': process_se, 'priority': 5},
# Asegurarse que x1.py mapea SdCoil a este procesador o a uno específico
{'type_name': 'sdcoil', 'processor_func': process_se, 'priority': 5}
]

View File

@ -1,85 +0,0 @@
# processors/process_timer.py
# -*- coding: utf-8 -*-
import sympy
import traceback
# Usar las nuevas utilidades
from .processor_utils import get_sympy_representation, sympy_expr_to_scl, format_variable_name, get_target_scl_name
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"
def process_timer(instruction, network_id, sympy_map, symbol_manager: SymbolManager, data):
"""
Genera SCL para Temporizadores (TON, TOF) directamente.
Requiere datos de instancia.
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = instruction.get("type", "").replace(SCL_SUFFIX,"").replace("_error","") # TON o TOF
if instruction.get("type","").endswith(SCL_SUFFIX) or "_error" in instruction.get("type",""):
return False
scl_timer_type = instr_type_original.upper()
if scl_timer_type not in ["TON", "TOF"]:
instruction["scl"] = f"// ERROR: Tipo de temporizador directo no soportado: {instr_type_original}"
instruction["type"] = instr_type_original + "_error"
return True
# 1. Obtener Inputs: IN, PT, y nombre de instancia (implícito o explícito)
in_info = instruction["inputs"].get("IN")
pt_info = instruction["inputs"].get("PT")
# Buscar instancia: ¿está en inputs? ¿o como instance_db?
instance_plc_name = instruction.get("instance_db") # Buscar primero aquí
if not instance_plc_name:
# Si no, buscar un input llamado 'timer' o similar? No estándar.
# Asumir que debe estar declarado como STAT si no hay instance_db
instance_plc_name = instruction.get("instance_name") # Nombre directo?
if not instance_plc_name:
instance_plc_name = f"#{scl_timer_type}_INSTANCE_{instr_uid}" # Placeholder final
print(f"Advertencia: No se encontró nombre/instancia para {instr_type_original} UID {instr_uid}. Usando placeholder '{instance_plc_name}'.")
sympy_in_expr = get_sympy_representation(in_info, network_id, sympy_map, symbol_manager)
sympy_or_const_pt = get_sympy_representation(pt_info, network_id, sympy_map, symbol_manager)
# Verificar dependencias
if sympy_in_expr is None or sympy_or_const_pt is None or instance_plc_name is None:
return False
# Formatear nombre de instancia
instance_name_scl = format_variable_name(instance_plc_name)
# Convertir entradas SymPy/Constante a SCL strings
in_scl = sympy_expr_to_scl(sympy_in_expr, symbol_manager)
pt_scl = sympy_expr_to_scl(sympy_or_const_pt, symbol_manager)
# Generar la llamada SCL
scl_call = f"{instance_name_scl}(IN := {in_scl}, PT := {pt_scl}); // TODO: Declarar {instance_name_scl} : {scl_timer_type};"
# Actualizar instrucción
instruction["scl"] = scl_call # SCL final generado
instruction["type"] = instr_type_original + SCL_SUFFIX
# 7. Actualizar sympy_map para las salidas Q y ET
map_key_q = (network_id, instr_uid, "Q") # Pin estándar SCL
# *** Store SymPy Symbol for boolean output Q ***
q_output_scl_access = f"{instance_name_scl}.Q" # String for SCL access
sympy_q_symbol = symbol_manager.get_symbol(q_output_scl_access) # Get/Create Symbol
if sympy_q_symbol:
sympy_map[map_key_q] = sympy_q_symbol # Store the SYMBOL
else:
print(f"Error: Could not create symbol for {q_output_scl_access} in {instr_type_original} {instr_uid}")
sympy_map[map_key_q] = None
map_key_et = (network_id, instr_uid, "ET") # Pin estándar SCL
# ET is TIME, store SCL access string
sympy_map[map_key_et] = f"{instance_name_scl}.ET"
return True
# --- Processor Information Function ---
def get_processor_info():
"""Devuelve info para TON y TOF directos."""
return [
{'type_name': 'ton', 'processor_func': process_timer, 'priority': 5},
{'type_name': 'tof', 'processor_func': process_timer, 'priority': 5}
]
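
Sketch of how a timer's boolean output stays usable for further simplification: the .Q access is registered as a Symbol (created directly here; the real code goes through symbol_manager.get_symbol), while .ET is propagated only as an SCL access string. The instance name is invented:

import sympy

q = sympy.Symbol("v7_")                      # stand-in for '"MyTon_DB".Q'
interlock = sympy.And(q, sympy.Not(sympy.Symbol("v8_")))
print(interlock)                             # v7_ & ~v8_ -- downstream contacts keep simplifying
et_access = '"MyTon_DB".ET'                  # TIME output: kept as a plain SCL string
print(et_access)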

View File

@ -1,310 +0,0 @@
# -*- coding: utf-8 -*-
# processors/processor_utils.py
import re
import sympy
import traceback
from .symbol_manager import SymbolManager, extract_plc_variable_name
SCL_SUFFIX = "_sympy_processed"  # also defined in x2_process.py
def get_sympy_representation(source_info, network_id, sympy_map, symbol_manager):
"""Gets the SymPy expression object representing the source."""
if not source_info:
print("Warning: get_sympy_representation called with None source_info.")
return None # Or raise error
# Handle lists (OR branches) - Recursively call and combine with sympy.Or
if isinstance(source_info, list):
sympy_parts = []
all_resolved = True
for sub_source in source_info:
sub_sympy = get_sympy_representation(sub_source, network_id, sympy_map, symbol_manager)
if sub_sympy is None:
all_resolved = False
break
sympy_parts.append(sub_sympy)
if not all_resolved:
return None
if not sympy_parts:
return sympy.false # Empty OR is false
# Return sympy.Or only if there are multiple parts
return sympy.Or(*sympy_parts) if len(sympy_parts) > 1 else sympy_parts[0]
# Handle single source dictionary
source_type = source_info.get("type")
if source_type == "powerrail":
return sympy.true
elif source_type == "variable":
plc_name = extract_plc_variable_name(source_info)
if plc_name:
return symbol_manager.get_symbol(plc_name)
else:
print(f"Error: Variable source without name: {source_info}")
return None # Error case
elif source_type == "constant":
# Represent constants directly if possible, otherwise maybe as symbols?
# For boolean simplification, only TRUE/FALSE matter significantly.
dtype = str(source_info.get("datatype", "")).upper()
value = source_info.get("value")
if dtype == "BOOL":
return sympy.true if str(value).upper() == "TRUE" else sympy.false
else:
# For simplification, treat non-boolean constants as opaque symbols?
# Or just return their string representation if they won't be simplified anyway?
# Let's return their string value for now, processors will handle it.
# This might need refinement if constants need symbolic handling.
return str(value) # Or maybe symbol_manager.get_symbol(str(value))?
elif source_type == "connection":
map_key = (
network_id,
source_info.get("source_instruction_uid"),
source_info.get("source_pin"),
)
# Return the SymPy object from the map
return sympy_map.get(map_key) # Returns None if not found (dependency not ready)
elif source_type == "unknown_source":
print(f"Warning: Referring to unknown source UID: {source_info.get('uid')}")
return None # Cannot resolve
else:
print(f"Warning: Unknown source type: {source_info}")
return None # Cannot resolve
def sympy_expr_to_scl(expr, symbol_manager, format_prec=5):
"""Converts a SymPy expression to an SCL string using the symbol map."""
if expr is None: return "/* ERROR: None expression */"
if expr == sympy.true: return "TRUE"
if expr == sympy.false: return "FALSE"
# Use sympy's string printer with custom settings if needed
# For boolean, standard printing might be okay, but need to substitute symbols
try:
# Get the inverse map (py_id -> plc_name)
inverse_map = symbol_manager.get_inverse_map()
# Substitute symbols back to their py_id strings first
# Need to handle the structure (And, Or, Not)
scl_str = sympy.sstr(expr, order=None) # Basic string representation
# Now, carefully replace py_id back to PLC names using regex
# Sort keys by length descending to replace longer IDs first
for py_id in sorted(inverse_map.keys(), key=len, reverse=True):
# Use word boundaries to avoid replacing parts of other IDs
scl_str = re.sub(r'\b' + re.escape(py_id) + r'\b', inverse_map[py_id], scl_str)
# Replace SymPy operators/functions with SCL equivalents
scl_str = scl_str.replace('&', ' AND ')
scl_str = scl_str.replace('|', ' OR ')
scl_str = scl_str.replace('^', ' XOR ') # If XOR is used
scl_str = scl_str.replace('~', 'NOT ')
# Normalize spacing around keywords; word boundaries keep identifiers that merely
# contain AND/OR/NOT/XOR (e.g. "HAND", "NOTE") and the XOR keyword itself intact
scl_str = re.sub(r'\bAND\b', ' AND ', scl_str)
scl_str = re.sub(r'\bOR\b', ' OR ', scl_str)
scl_str = re.sub(r'\bXOR\b', ' XOR ', scl_str)
scl_str = re.sub(r'\bNOT\b', 'NOT ', scl_str) # Space after NOT
# Clean up potential double spaces, etc.
scl_str = re.sub(r'\s+', ' ', scl_str).strip()
# Handle parentheses potentially added by sstr - maybe remove redundant ones?
# Be careful not to break operator precedence.
return scl_str
except Exception as e:
print(f"Error converting SymPy expr '{expr}' to SCL: {e}")
traceback.print_exc()
return f"/* ERROR converting SymPy: {expr} */"
def get_scl_representation(source_info, network_id, scl_map, access_map):
if not source_info:
return None
if isinstance(source_info, list):
scl_parts = []
all_resolved = True
for sub_source in source_info:
sub_scl = get_scl_representation(
sub_source, network_id, scl_map, access_map
)
if sub_scl is None:
all_resolved = False
break
if (
sub_scl in ["TRUE", "FALSE"]
or (sub_scl.startswith('"') and sub_scl.endswith('"'))
or sub_scl.isdigit()
or (sub_scl.startswith("(") and sub_scl.endswith(")"))
):
scl_parts.append(sub_scl)
else:
scl_parts.append(f"({sub_scl})")
return (
(
" OR ".join(scl_parts)
if len(scl_parts) > 1
else (scl_parts[0] if scl_parts else "FALSE")
)
if all_resolved
else None
)
source_type = source_info.get("type")
if source_type == "powerrail":
return "TRUE"
elif source_type == "variable":
name = source_info.get("name")
# Asegurar que los nombres de variables se formatean correctamente aquí también
return (
format_variable_name(name)
if name
else f"_ERR_VAR_NO_NAME_{source_info.get('uid')}_"
)
elif source_type == "constant":
dtype = str(source_info.get("datatype", "")).upper()
value = source_info.get("value")
try:
if dtype == "BOOL":
return str(value).upper()
elif dtype in [
"INT",
"DINT",
"SINT",
"USINT",
"UINT",
"UDINT",
"LINT",
"ULINT",
"WORD",
"DWORD",
"LWORD",
"BYTE",
]:
return str(value)
elif dtype in ["REAL", "LREAL"]:
s_val = str(value)
return s_val if "." in s_val or "e" in s_val.lower() else s_val + ".0"
elif dtype == "STRING":
# Escapar comillas simples dentro del string si es necesario
str_val = str(value).replace("'", "''")
return f"'{str_val}'"
elif dtype == "TYPEDCONSTANT":
# Podría necesitar formateo específico basado en el tipo real
return str(value)
else:
# Otros tipos (TIME, DATE, etc.) - devolver como string por ahora
str_val = str(value).replace("'", "''")
return f"'{str_val}'"
except Exception as e:
print(f"Advertencia: Error formateando constante {source_info}: {e}")
return f"_ERR_CONST_FORMAT_{source_info.get('uid')}_"
elif source_type == "connection":
map_key = (
network_id,
source_info.get("source_instruction_uid"),
source_info.get("source_pin"),
)
return scl_map.get(map_key)
elif source_type == "unknown_source":
print(
f"Advertencia: Refiriendo a fuente desconocida UID: {source_info.get('uid')}"
)
return f"_ERR_UNKNOWN_SRC_{source_info.get('uid')}_"
else:
print(f"Advertencia: Tipo de fuente desconocido: {source_info}")
return f"_ERR_INVALID_SRC_TYPE_"
def format_variable_name(name):
"""Limpia el nombre de la variable para SCL."""
if not name:
return "_INVALID_NAME_"
# Si ya está entre comillas dobles, asumimos que es un nombre complejo (ej. "DB"."Variable")
# y lo devolvemos tal cual para SCL.
if name.startswith('"') and name.endswith('"'):
# Podríamos añadir validación extra aquí si fuera necesario
return name
# Si no tiene comillas, es un nombre simple (ej. Tag_1, #tempVar)
# Reemplazar caracteres no válidos (excepto '_') por '_'
# Permitir '#' al inicio para variables temporales
prefix = ""
if name.startswith("#"):
prefix = "#"
name = name[1:]
# Permitir letras, números y guiones bajos. Reemplazar el resto.
# Asegurarse de que no empiece con número (después del # si existe)
if name and name[0].isdigit():
name = "_" + name
# Reemplazar caracteres no válidos
name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
return prefix + name
def generate_temp_var_name(network_id, instr_uid, pin_name):
net_id_clean = str(network_id).replace("-", "_")
instr_uid_clean = str(instr_uid).replace("-", "_")
pin_name_clean = str(pin_name).replace("-", "_").lower()
# Usar # para variables temporales SCL estándar
return f"#_temp_{net_id_clean}_{instr_uid_clean}_{pin_name_clean}"
def get_target_scl_name(instruction, pin_name, network_id, default_to_temp=True):
"""Gets the SCL formatted name for a target variable.
Handles instruction outputs AND specific inputs like Coil operand.
"""
instr_uid = instruction["instruction_uid"]
# Ahora SCL_SUFFIX está definido en este módulo
instr_type_upper = instruction.get("type", "").upper().replace(SCL_SUFFIX.upper(), "").replace("_ERROR", "") # Check original type
target_info = None
# Special handling for inputs that represent the target variable
if instr_type_upper in ["COIL", "SCOIL", "RCOIL"] and pin_name == "operand":
target_info = instruction.get("inputs", {}).get("operand")
# Add other instructions where input pin == target if necessary
# elif instr_type_upper == "XYZ" and pin_name == "some_input_target_pin":
# target_info = instruction.get("inputs", {}).get(pin_name)
else:
# Default: Assume pin_name refers to an output pin
output_pin_data = instruction.get("outputs", {}).get(pin_name)
# Check if it's a list and has one connection (standard case)
if (output_pin_data and isinstance(output_pin_data, list) and len(output_pin_data) == 1):
target_info = output_pin_data[0]
# Add handling for direct output assignment if your JSON structure supports it
target_scl = None
if target_info:
if target_info.get("type") == "variable":
plc_name = target_info.get("name")
if plc_name:
target_scl = format_variable_name(plc_name) # Use existing util
else:
print(f"Error: Target variable for {instr_uid}.{pin_name} has no name (UID: {target_info.get('uid')}).")
elif target_info.get("type") == "constant":
print(f"Advertencia: Attempt to write to constant target {instr_uid}.{pin_name} (UID: {target_info.get('uid')}).")
# else: # Handle other target types if needed
# print(f"Advertencia: Target {instr_uid}.{pin_name} is not a variable: {target_info.get('type')}.")
# else: # No target info found for the specified pin
# print(f"DEBUG: No target info found for {instr_uid}.{pin_name}")
pass
# Handle default_to_temp logic
if target_scl:
return target_scl
elif default_to_temp:
# Generate temp only if no explicit target was found AND default is allowed
print(f"INFO: Generating temp var for {instr_uid}.{pin_name}") # Be informative
return generate_temp_var_name(network_id, instr_uid, pin_name)
else:
# No target found and default temps not allowed
return None
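
A self-contained sketch of the py_id -> PLC-name substitution and operator rewriting performed by sympy_expr_to_scl; the mapped names below are invented:

import re
import sympy

inverse_map = {"v0_": '"DB_Main".Start', "v1_": "#localStop"}    # py_id -> PLC name
expr = sympy.Or(sympy.Symbol("v0_"), sympy.Not(sympy.Symbol("v1_")))
scl = sympy.sstr(expr, order=None)                               # 'v0_ | ~v1_'
for py_id in sorted(inverse_map, key=len, reverse=True):
    scl = re.sub(r"\b" + re.escape(py_id) + r"\b", inverse_map[py_id], scl)
scl = scl.replace("|", " OR ").replace("&", " AND ").replace("~", "NOT ")
print(re.sub(r"\s+", " ", scl).strip())                          # "DB_Main".Start OR NOT #localStop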

View File

@ -1,58 +0,0 @@
# processors/symbol_manager.py
import sympy
import re
class SymbolManager:
def __init__(self):
# plc_name -> py_id (e.g., '"DB".Var' -> 'v0_')
self.plc_to_py_id = {}
# py_id -> Symbol object (e.g., 'v0_' -> sympy.Symbol('v0_'))
self.py_id_to_symbol = {}
# py_id -> plc_name (e.g., 'v0_' -> '"DB".Var') - Inverse mapping
self.py_id_to_plc = {}
self.counter = 0
# Pre-define common keywords/constants to avoid mapping them
self.reserved_names = {"TRUE", "FALSE"} # Add others if needed
def _generate_py_id(self):
py_id = f"v{self.counter}_"
self.counter += 1
# Extremely unlikely collision, but check anyway
while py_id in self.py_id_to_symbol:
py_id = f"v{self.counter}_"
self.counter += 1
return py_id
def get_symbol(self, plc_var_name):
"""Gets/Creates a SymPy Symbol for a PLC variable name."""
if plc_var_name is None:
print("Warning: Attempted to get symbol for None PLC name.")
return None # Or handle error appropriately
if plc_var_name.upper() in self.reserved_names:
print(f"Warning: Attempted to create symbol for reserved name: {plc_var_name}")
return None # Or handle differently (e.g., return sympy.true/false?)
if plc_var_name not in self.plc_to_py_id:
py_id = self._generate_py_id()
self.plc_to_py_id[plc_var_name] = py_id
self.py_id_to_plc[py_id] = plc_var_name
self.py_id_to_symbol[py_id] = sympy.symbols(py_id)
# print(f"DEBUG SymbolManager: Created {py_id} -> {plc_var_name}") # Debug
else:
py_id = self.plc_to_py_id[plc_var_name]
return self.py_id_to_symbol.get(py_id)
def get_plc_name(self, py_id):
"""Gets the original PLC name from a py_id."""
return self.py_id_to_plc.get(py_id)
def get_inverse_map(self):
"""Returns the map needed for postprocessing (py_id -> plc_name)."""
return self.py_id_to_plc.copy()
# Helper function to extract PLC variable name from JSON operand info
def extract_plc_variable_name(operand_info):
if operand_info and operand_info.get("type") == "variable":
return operand_info.get("name")
return None # Not a variable or info missing
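
Usage sketch, assuming the processors package is importable from the working directory; the PLC names are invented:

from processors.symbol_manager import SymbolManager, extract_plc_variable_name

mgr = SymbolManager()
s1 = mgr.get_symbol('"DB_Main".Start')
s2 = mgr.get_symbol('"DB_Main".Start')
assert s1 is s2                        # one Symbol per PLC name
print(mgr.get_inverse_map())           # {'v0_': '"DB_Main".Start'}
print(extract_plc_variable_name({"type": "variable", "name": "#tmp"}))   # #tmp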

View File

@ -1,165 +0,0 @@
import argparse
import subprocess
import os
import sys
import locale
import glob
# (Función get_console_encoding y variable CONSOLE_ENCODING como antes)
def get_console_encoding():
"""Obtiene la codificación preferida de la consola, con fallback."""
try:
return locale.getpreferredencoding(False)
except Exception:
# Fallback común en Windows si falla getpreferredencoding
return "cp1252" # O prueba con 'utf-8' si cp1252 da problemas
CONSOLE_ENCODING = get_console_encoding()
# print(f"Detected console encoding: {CONSOLE_ENCODING}")
# (Función run_script como antes, usando CONSOLE_ENCODING)
def run_script(script_name, xml_arg):
"""Runs a given script with the specified XML file argument."""
# Asegurarse que la ruta al script sea absoluta o relativa al script actual
script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), script_name)
# Usar la ruta absoluta al ejecutable de Python actual
python_executable = sys.executable
command = [python_executable, script_path, xml_arg] # Usar la ruta absoluta de python
print(f"\n--- Running {script_name} with argument: {xml_arg} ---")
try:
# Ejecutar el proceso hijo
result = subprocess.run(
command,
check=True, # Lanza excepción si el script falla (return code != 0)
capture_output=True,# Captura stdout y stderr
text=True, # Decodifica stdout/stderr como texto
encoding=CONSOLE_ENCODING, # Usa la codificación detectada
errors='replace' # Reemplaza caracteres no decodificables
)
# Imprimir stdout y stderr si no están vacíos
stdout_clean = result.stdout.strip() if result.stdout else ""
stderr_clean = result.stderr.strip() if result.stderr else ""
if stdout_clean:
print(stdout_clean)
if stderr_clean:
# Imprimir stderr claramente para errores del script hijo
print(f"--- Stderr ({script_name}) ---", file=sys.stderr) # Imprimir en stderr
print(stderr_clean, file=sys.stderr)
print("--------------------------", file=sys.stderr)
print(f"--- {script_name} finished successfully ---")
return True # Indicar éxito
except FileNotFoundError:
# Error si el script python o el ejecutable no se encuentran
print(f"Error: Script '{script_path}' or Python executable '{python_executable}' not found.", file=sys.stderr)
return False
except subprocess.CalledProcessError as e:
# Error si el script hijo devuelve un código de error (ej., sys.exit(1))
print(f"Error running {script_name}: Script returned non-zero exit code {e.returncode}.", file=sys.stderr)
# Decodificar e imprimir stdout/stderr del proceso fallido
stdout_decoded = e.stdout.strip() if e.stdout else ""
stderr_decoded = e.stderr.strip() if e.stderr else ""
if stdout_decoded:
print(f"--- Stdout ({script_name}) ---", file=sys.stderr)
print(stdout_decoded, file=sys.stderr)
if stderr_decoded:
print(f"--- Stderr ({script_name}) ---", file=sys.stderr)
print(stderr_decoded, file=sys.stderr)
print("--------------------------", file=sys.stderr)
return False # Indicar fallo
except Exception as e:
# Otros errores inesperados
print(f"An unexpected error occurred while running {script_name}: {e}", file=sys.stderr)
# Imprimir traceback para depuración
import traceback
traceback.print_exc(file=sys.stderr)
return False # Indicar fallo
# --- NO SE NECESITA select_xml_file() si procesamos todos ---
if __name__ == "__main__":
# --- PARTE 1: BUSCAR ARCHIVOS ---
# Directorio base donde buscar los archivos XML (relativo al script)
base_search_dir = "XML Project"
# Obtener la ruta absoluta del directorio donde está x0_main.py
script_dir = os.path.dirname(os.path.abspath(__file__))
xml_project_dir = os.path.join(script_dir, base_search_dir)
print(f"Buscando archivos XML recursivamente en: '{xml_project_dir}'")
# Verificar si el directorio 'XML Project' existe
if not os.path.isdir(xml_project_dir):
print(f"Error: El directorio '{xml_project_dir}' no existe o no es un directorio.", file=sys.stderr)
print("Por favor, crea el directorio 'XML Project' en la misma carpeta que este script y coloca tus archivos XML dentro.")
sys.exit(1) # Salir con error
# Buscar todos los archivos .xml recursivamente
search_pattern = os.path.join(xml_project_dir, "**", "*.xml")
xml_files_found = glob.glob(search_pattern, recursive=True)
if not xml_files_found:
print(f"No se encontraron archivos XML en '{xml_project_dir}' o sus subdirectorios.")
sys.exit(0) # Salir limpiamente si no hay archivos
print(f"Se encontraron {len(xml_files_found)} archivos XML para procesar:")
xml_files_found.sort() # Ordenar para consistencia
for xml_file in xml_files_found:
print(f" - {os.path.relpath(xml_file, script_dir)}")
# --- PARTE 2: PROCESAR CADA ARCHIVO ---
# Scripts a ejecutar en secuencia
script1 = "x1_to_json.py"
script2 = "x2_process.py"
script3 = "x3_generate_scl.py"
processed_count = 0
failed_count = 0
# Procesar cada archivo encontrado en el bucle
for xml_filepath in xml_files_found:
relative_path = os.path.relpath(xml_filepath, script_dir)
print(f"\n--- Iniciando pipeline para: {relative_path} ---")
# Usar la ruta absoluta para los scripts hijos
absolute_xml_filepath = os.path.abspath(xml_filepath)
# Ejecutar los scripts en secuencia
success = True
if not run_script(script1, absolute_xml_filepath):
print(f"\nPipeline falló en el script '{script1}' para el archivo: {relative_path}", file=sys.stderr)
success = False
elif not run_script(script2, absolute_xml_filepath):
print(f"\nPipeline falló en el script '{script2}' para el archivo: {relative_path}", file=sys.stderr)
success = False
elif not run_script(script3, absolute_xml_filepath):
print(f"\nPipeline falló en el script '{script3}' para el archivo: {relative_path}", file=sys.stderr)
success = False
# Actualizar contadores y mostrar estado
if success:
print(f"--- Pipeline completado exitosamente para: {relative_path} ---")
processed_count += 1
else:
failed_count += 1
print(f"--- Pipeline falló para: {relative_path} ---", file=sys.stderr) # Indicar fallo
# --- PARTE 3: RESUMEN FINAL ---
print("\n--- Resumen Final del Procesamiento ---")
print(f"Total de archivos XML encontrados: {len(xml_files_found)}")
print(f"Archivos procesados exitosamente por el pipeline completo: {processed_count}")
print(f"Archivos que fallaron en algún punto del pipeline: {failed_count}")
print("---------------------------------------")
# Salir con código 0 si todo fue bien, 1 si hubo fallos
if failed_count > 0:
sys.exit(1)
else:
sys.exit(0)
# --- END: the redundant logic that used to follow the loop has been removed ---
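
Typical invocation, assuming the layout described above (the three worker scripts next to x0_main.py and the XML files under 'XML Project/'):

# From the repository root:
#   python x0_main.py
# For every XML found under 'XML Project/**', the batch runner then executes, in order:
#   python x1_to_json.py <file.xml>
#   python x2_process.py <file.xml>
#   python x3_generate_scl.py <file.xml>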

File diff suppressed because it is too large

View File

@ -1,643 +0,0 @@
# -*- coding: utf-8 -*-
import json
import argparse
import os
import copy
import traceback
import re
import importlib
import sys
import sympy # Import sympy
# Import necessary components from processors directory
from processors.processor_utils import (
format_variable_name, # Keep if used outside processors
sympy_expr_to_scl, # Needed for IF grouping and maybe others
# get_target_scl_name might be used here? Unlikely.
)
from processors.symbol_manager import SymbolManager # Import the manager
# --- Constantes y Configuración ---
SCL_SUFFIX = "_sympy_processed" # New suffix to indicate processing method
GROUPED_COMMENT = "// Logic included in grouped IF"
SIMPLIFIED_IF_COMMENT = "// Simplified IF condition by script" # May still be useful
# Global data dictionary
data = {}
# --- (Incluye aquí las funciones process_group_ifs y load_processors SIN CAMBIOS) ---
def process_group_ifs(instruction, network_id, sympy_map, symbol_manager, data):
"""
Busca condiciones (ya procesadas -> tienen expr SymPy en sympy_map)
y, si habilitan un grupo (>1) de bloques funcionales (con SCL ya generado),
construye el bloque IF agrupado CON LA CONDICIÓN SIMPLIFICADA.
Modifica el campo 'scl' de la instrucción generadora de condición.
(Esta es la implementación de la función como la tenías en el archivo original)
"""
instr_uid = instruction["instruction_uid"]
instr_type_original = (
instruction.get("type", "").replace(SCL_SUFFIX, "").replace("_error", "")
)
made_change = False
# Check if this instruction *could* generate a condition suitable for grouping
# It must have been processed by the new SymPy method
if (
not instruction.get("type", "").endswith(
SCL_SUFFIX
) # Check if processed by new method
or "_error" in instruction.get("type", "")
or instruction.get("grouped", False)
or instr_type_original
not in [ # Original types that produce boolean results
"Contact",
"O",
"Eq",
"Ne",
"Gt",
"Lt",
"Ge",
"Le",
"PBox",
"NBox",
"And",
"Xor",
"Not", # Add others like comparison
]
):
return False
# Avoid reagruping if SCL already contains a complex IF (less likely now)
current_scl = instruction.get("scl", "")
if (
current_scl.strip().startswith("IF")
and "END_IF;" in current_scl
and GROUPED_COMMENT not in current_scl
):
return False
# *** Get the SymPy expression for the condition ***
map_key_out = (network_id, instr_uid, "out")
sympy_condition_expr = sympy_map.get(map_key_out)
# No SymPy expression found or trivial conditions
if sympy_condition_expr is None or sympy_condition_expr in [
sympy.true,
sympy.false,
]:
return False
# --- Find consumer instructions (logic similar to before) ---
grouped_instructions_cores = []
consumer_instr_list = []
network_logic = next(
(net["logic"] for net in data["networks"] if net["id"] == network_id), []
)
if not network_logic:
return False
groupable_types = [ # Types whose *final SCL* we want to group
"Move",
"Add",
"Sub",
"Mul",
"Div",
"Mod",
"Convert",
"Call_FC",
"Call_FB", # Assuming these generate final SCL in their processors now
# SCoil/RCoil might also be groupable if their SCL is final assignment
"SCoil",
"RCoil",
"BLKMOV", # Added BLKMOV
"TON", "TOF", "TP", "Se", "Sd", # Added timers
"CTU", "CTD", "CTUD", # Added counters
]
for consumer_instr in network_logic:
consumer_uid = consumer_instr["instruction_uid"]
if consumer_instr.get("grouped", False) or consumer_uid == instr_uid:
continue
consumer_en = consumer_instr.get("inputs", {}).get("en")
consumer_type = consumer_instr.get("type", "") # Current type suffix matters
consumer_type_original = consumer_type.replace(SCL_SUFFIX, "").replace(
"_error", ""
)
is_enabled_by_us = False
if (
isinstance(consumer_en, dict)
and consumer_en.get("type") == "connection"
and consumer_en.get("source_instruction_uid") == instr_uid
and consumer_en.get("source_pin") == "out"
):
is_enabled_by_us = True
# Check if consumer is groupable AND has its final SCL generated
if (
is_enabled_by_us
and consumer_type.endswith(SCL_SUFFIX) # Check if processed
and consumer_type_original in groupable_types
):
consumer_scl = consumer_instr.get("scl", "")
# Extract core SCL
core_scl = None
if consumer_scl:
# If consumer SCL itself is an IF generated by EN, take the body
if consumer_scl.strip().startswith("IF"):
match = re.search(
r"IF\s+.*?THEN\s*(.*?)\s*END_IF;", # More robust regex
consumer_scl,
re.DOTALL | re.IGNORECASE,
)
core_scl = match.group(1).strip() if match else None
# If body contains another IF, maybe don't group? (optional complexity)
# if core_scl and core_scl.strip().startswith("IF"): core_scl = None
elif not consumer_scl.strip().startswith(
"//"
): # Otherwise, take the whole line if not comment
core_scl = consumer_scl.strip()
if core_scl:
grouped_instructions_cores.append(core_scl)
consumer_instr_list.append(consumer_instr)
# --- If groupable consumers found ---
if len(grouped_instructions_cores) > 1:
print(
f"INFO: Agrupando {len(grouped_instructions_cores)} instr. bajo condición de {instr_type_original} UID {instr_uid}"
)
# *** Simplify the SymPy condition ***
try:
# simplified_expr = sympy.simplify_logic(sympy_condition_expr, force=True)
simplified_expr = sympy.logic.boolalg.to_dnf(
sympy_condition_expr, simplify=True
)
except Exception as e:
print(f"Error simplifying condition for grouping UID {instr_uid}: {e}")
simplified_expr = sympy_condition_expr # Fallback
# *** Convert simplified condition to SCL string ***
condition_scl_simplified = sympy_expr_to_scl(simplified_expr, symbol_manager)
# *** Build the grouped IF SCL ***
scl_grouped_lines = [f"IF {condition_scl_simplified} THEN"]
for core_line in grouped_instructions_cores:
indented_core = "\n".join(
[f" {line.strip()}" for line in core_line.splitlines()]
)
scl_grouped_lines.append(indented_core)
scl_grouped_lines.append("END_IF;")
final_grouped_scl = "\n".join(scl_grouped_lines)
# Update the generator instruction's SCL
instruction["scl"] = final_grouped_scl
# Mark consumers as grouped
for consumer_instr in consumer_instr_list:
consumer_instr["scl"] = f"{GROUPED_COMMENT} (by UID {instr_uid})"
consumer_instr["grouped"] = True
made_change = True
return made_change
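# Standalone sketch (not called by the pipeline) of the simplification step used
# above; the symbols 'a' and 'b' are hypothetical placeholders, not real tags:
def _example_simplify_condition():
    a, b = sympy.symbols("a b")
    redundant = sympy.Or(sympy.And(a, b), sympy.And(a, sympy.Not(b)))
    # to_dnf(..., simplify=True) collapses (a AND b) OR (a AND NOT b) to just 'a';
    # sympy_expr_to_scl() would then render the simplified expression as SCL.
    return sympy.logic.boolalg.to_dnf(redundant, simplify=True)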
def load_processors(processors_dir="processors"):
"""
Escanea el directorio, importa módulos, construye el mapa y una lista
ordenada por prioridad.
"""
processor_map = {}
processor_list_unsorted = [] # Lista para guardar (priority, type_name, func)
default_priority = 10 # Prioridad si no se define en get_processor_info
if not os.path.isdir(processors_dir):
print(f"Error: Directorio de procesadores no encontrado: '{processors_dir}'")
return processor_map, [] # Devuelve mapa vacío y lista vacía
print(f"Cargando procesadores desde: '{processors_dir}'")
processors_package = os.path.basename(processors_dir)
for filename in os.listdir(processors_dir):
if filename.startswith("process_") and filename.endswith(".py"):
module_name_rel = filename[:-3]
full_module_name = f"{processors_package}.{module_name_rel}"
try:
module = importlib.import_module(full_module_name)
if hasattr(module, "get_processor_info") and callable(
module.get_processor_info
):
processor_info = module.get_processor_info()
info_list = []
if isinstance(processor_info, dict):
info_list = [processor_info]
elif isinstance(processor_info, list):
info_list = processor_info
else:
print(
f" Advertencia: get_processor_info en {full_module_name} devolvió tipo inesperado. Se ignora."
)
continue
for info in info_list:
if (
isinstance(info, dict)
and "type_name" in info
and "processor_func" in info
):
type_name = info["type_name"].lower()
processor_func = info["processor_func"]
# Obtener prioridad, usar default si no existe
priority = info.get("priority", default_priority)
if callable(processor_func):
if type_name in processor_map:
print(
f" Advertencia: '{type_name}' en {full_module_name} sobrescribe definición anterior."
)
processor_map[type_name] = processor_func
# Añadir a la lista para ordenar
processor_list_unsorted.append(
{
"priority": priority,
"type_name": type_name,
"func": processor_func,
}
)
print(
f" - Cargado '{type_name}' (Prio: {priority}) desde {module_name_rel}.py"
)
else:
print(
f" Advertencia: 'processor_func' para '{type_name}' en {full_module_name} no es callable."
)
else:
print(
f" Advertencia: Entrada inválida en {full_module_name}: {info}"
)
else:
print(
f" Advertencia: Módulo {module_name_rel}.py no tiene 'get_processor_info'."
)
except ImportError as e:
print(f"Error importando {full_module_name}: {e}")
except Exception as e:
print(f"Error procesando {full_module_name}: {e}")
traceback.print_exc()
# Ordenar la lista por prioridad (menor primero)
processor_list_sorted = sorted(processor_list_unsorted, key=lambda x: x["priority"])
print(f"\nTotal de tipos de procesadores cargados: {len(processor_map)}")
print(
f"Orden de procesamiento por prioridad: {[item['type_name'] for item in processor_list_sorted]}"
)
# Devolver el mapa (para lookup rápido si es necesario) y la lista ordenada
return processor_map, processor_list_sorted
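# For reference, the contract a processor module must fulfil so that
# load_processors() picks it up. Hypothetical module (file name, type name and
# body are illustrative only, not part of this repository):
#
#   # processors/process_example.py
#   def process_example(instruction, network_id, sympy_map, symbol_manager, data):
#       instruction["scl"] = "// example"
#       instruction["type"] = instruction.get("type", "") + "_sympy_processed"
#       return True
#
#   def get_processor_info():
#       return {"type_name": "example",
#               "processor_func": process_example,
#               "priority": 10}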
# --- Bucle Principal de Procesamiento (Modificado para STL y tipo de bloque) ---
def process_json_to_scl(json_filepath):
"""
Lee JSON simplificado, aplica procesadores dinámicos (ignorando redes STL y bloques DB),
y guarda JSON procesado.
"""
global data
if not os.path.exists(json_filepath):
print(f"Error: JSON no encontrado: {json_filepath}")
return
print(f"Cargando JSON desde: {json_filepath}")
try:
with open(json_filepath, "r", encoding="utf-8") as f:
data = json.load(f)
except Exception as e:
print(f"Error al cargar JSON: {e}")
traceback.print_exc()
return
# --- MODIFICADO: Obtener tipo de bloque (FC, FB, GlobalDB, OB) ---
block_type = data.get("block_type", "Unknown") # FC, FB, GlobalDB, OB
print(f"Procesando bloque tipo: {block_type}, Lenguaje principal: {data.get('language', 'Unknown')}")
# --- MODIFICADO: SI ES UN GlobalDB, SALTAR EL PROCESAMIENTO LÓGICO ---
if block_type == "GlobalDB": # <-- Comprobar tipo de bloque
print(
"INFO: El bloque es un Data Block (GlobalDB). Saltando procesamiento lógico de x2."
)
# Simplemente guardamos una copia (o el mismo archivo si no se requiere sufijo)
output_filename = json_filepath.replace(
"_simplified.json", "_simplified_processed.json"
)
print(f"Guardando JSON de DB (sin cambios lógicos) en: {output_filename}")
try:
with open(output_filename, "w", encoding="utf-8") as f:
json.dump(data, f, indent=4, ensure_ascii=False)
print("Guardado de DB completado.")
except Exception as e:
print(f"Error Crítico al guardar JSON del DB: {e}")
traceback.print_exc()
return # <<< SALIR TEMPRANO PARA DBs
# --- SI NO ES DB (FC, FB, OB), CONTINUAR CON EL PROCESAMIENTO LÓGICO ---
print(f"INFO: El bloque es {block_type}. Iniciando procesamiento lógico...") # <-- Mensaje actualizado
script_dir = os.path.dirname(__file__)
processors_dir_path = os.path.join(script_dir, "processors")
processor_map, sorted_processors = load_processors(processors_dir_path)
if not processor_map:
print("Error crítico: No se cargaron procesadores. Abortando.")
return
network_access_maps = {}
# Crear mapas de acceso por red (copiado/adaptado de versión anterior)
for network in data.get("networks", []):
net_id = network["id"]
current_access_map = {}
for instr in network.get("logic", []):
for _, source in instr.get("inputs", {}).items():
sources_to_check = (
source
if isinstance(source, list)
else ([source] if isinstance(source, dict) else [])
)
for src in sources_to_check:
if (
isinstance(src, dict)
and src.get("uid")
and src.get("type") in ["variable", "constant"]
):
current_access_map[src["uid"]] = src
for _, dest_list in instr.get("outputs", {}).items():
if isinstance(dest_list, list):
for dest in dest_list:
if (
isinstance(dest, dict)
and dest.get("uid")
and dest.get("type") in ["variable", "constant"]
):
current_access_map[dest["uid"]] = dest
network_access_maps[net_id] = current_access_map
symbol_manager = SymbolManager()
sympy_map = {}
max_passes = 30
passes = 0
processing_complete = False
print(f"\n--- Iniciando Bucle de Procesamiento Iterativo ({block_type}) ---") # <-- Mensaje actualizado
while passes < max_passes and not processing_complete:
passes += 1
made_change_in_base_pass = False
made_change_in_group_pass = False
print(f"\n--- Pase {passes} ---")
num_sympy_processed_this_pass = 0
num_grouped_this_pass = 0
# --- FASE 1: Procesadores Base (Ignorando STL) ---
print(f" Fase 1 (SymPy Base - Orden por Prioridad):")
num_sympy_processed_this_pass = 0 # Resetear contador para el pase
for processor_info in sorted_processors:
current_type_name = processor_info["type_name"]
func_to_call = processor_info["func"]
for network in data.get("networks", []):
network_id = network["id"]
network_lang = network.get("language", "LAD") # Lenguaje de la red
if network_lang == "STL": # Saltar redes STL
continue
access_map = network_access_maps.get(network_id, {})
network_logic = network.get("logic", [])
for instruction in network_logic:
instr_uid = instruction.get("instruction_uid")
# Usar el tipo *actual* de la instrucción para el lookup
instr_type_current = instruction.get("type", "Unknown")
# Saltar si ya está procesado, es error, agrupado, o tipo crudo
if (
instr_type_current.endswith(SCL_SUFFIX)
or "_error" in instr_type_current
or instruction.get("grouped", False)
or instr_type_current
in ["RAW_STL_CHUNK", "RAW_SCL_CHUNK", "UNSUPPORTED_LANG", "UNSUPPORTED_CONTENT", "PARSING_ERROR"]
):
continue
# El lookup usa el tipo actual (que aún no tiene el sufijo)
lookup_key = instr_type_current.lower()
effective_type_name = lookup_key
# Mapeo especial para llamadas FC/FB
if instr_type_current == "Call":
call_block_type = instruction.get("block_type", "").upper()
if call_block_type == "FC":
effective_type_name = "call_fc"
elif call_block_type == "FB":
effective_type_name = "call_fb"
# Añadir otros tipos de llamada si es necesario
# Si el tipo efectivo coincide con el procesador actual
if effective_type_name == current_type_name:
try:
# Pasar 'data' a la función del procesador
changed = func_to_call(
instruction, network_id, sympy_map, symbol_manager, data
)
if changed:
made_change_in_base_pass = True
num_sympy_processed_this_pass += 1
except Exception as e:
print(
f"ERROR(SymPy Base) al procesar {instr_type_current} UID {instr_uid}: {e}"
)
traceback.print_exc()
instruction["scl"] = (
f"// ERROR en SymPy procesador base: {e}"
)
# Añadir sufijo de error al tipo actual
instruction["type"] = instr_type_current + "_error"
made_change_in_base_pass = True # Se hizo un cambio (marcar como error)
print(
f" -> {num_sympy_processed_this_pass} instrucciones (no STL) procesadas con SymPy."
)
# --- FASE 2: Agrupación IF (Ignorando STL) ---
if (
made_change_in_base_pass or passes == 1
): # Ejecutar siempre en el primer pase o si hubo cambios
print(f" Fase 2 (Agrupación IF con Simplificación):")
num_grouped_this_pass = 0 # Resetear contador para el pase
for network in data.get("networks", []):
network_id = network["id"]
network_lang = network.get("language", "LAD")
if network_lang == "STL":
continue # Saltar STL
network_logic = network.get("logic", [])
# Iterar en orden por UID puede ser más estable para agrupación
uids_in_network = sorted([instr.get("instruction_uid", "Z") for instr in network_logic if instr.get("instruction_uid")])
for uid_to_process in uids_in_network:
instruction = next((instr for instr in network_logic if instr.get("instruction_uid") == uid_to_process), None)
if not instruction: continue
# Saltar si ya está agrupada, es error, etc.
if instruction.get("grouped") or "_error" in instruction.get("type", ""):
continue
# La agrupación sólo aplica a instrucciones que generan condiciones booleanas
# y que ya fueron procesadas (tienen el sufijo)
if instruction.get("type", "").endswith(SCL_SUFFIX):
try:
group_changed = process_group_ifs(
instruction, network_id, sympy_map, symbol_manager, data
)
if group_changed:
made_change_in_group_pass = True
num_grouped_this_pass += 1
except Exception as e:
print(
f"ERROR(GroupLoop) al intentar agrupar desde UID {instruction.get('instruction_uid')}: {e}"
)
traceback.print_exc()
print(
f" -> {num_grouped_this_pass} agrupaciones realizadas (en redes no STL)."
)
# --- Comprobar si se completó el procesamiento ---
if not made_change_in_base_pass and not made_change_in_group_pass:
print(
f"\n--- No se hicieron más cambios en el pase {passes}. Proceso iterativo completado. ---"
)
processing_complete = True
else:
print(
f"--- Fin Pase {passes}: {num_sympy_processed_this_pass} proc SymPy, {num_grouped_this_pass} agrup. Continuando..."
)
# --- Comprobar límite de pases ---
if passes == max_passes and not processing_complete:
print(f"\n--- ADVERTENCIA: Límite de {max_passes} pases alcanzado...")
# --- FIN BUCLE ITERATIVO ---
# --- Verificación Final (Ajustada para RAW_STL_CHUNK) ---
print(f"\n--- Verificación Final de Instrucciones No Procesadas ({block_type}) ---") # <-- Mensaje actualizado
unprocessed_count = 0
unprocessed_details = []
ignored_types = [
"raw_scl_chunk",
"unsupported_lang",
"raw_stl_chunk",
"unsupported_content", # Añadido de x1
"parsing_error", # Añadido de x1
]
for network in data.get("networks", []):
network_id = network.get("id", "Unknown ID")
network_title = network.get("title", f"Network {network_id}")
network_lang = network.get("language", "LAD")
if network_lang == "STL":
continue # No verificar redes STL
for instruction in network.get("logic", []):
instr_uid = instruction.get("instruction_uid", "Unknown UID")
instr_type = instruction.get("type", "Unknown Type")
is_grouped = instruction.get("grouped", False)
if (
not instr_type.endswith(SCL_SUFFIX)
and "_error" not in instr_type
and not is_grouped
and instr_type.lower() not in ignored_types
):
unprocessed_count += 1
unprocessed_details.append(
f" - Red '{network_title}' (ID: {network_id}, Lang: {network_lang}), "
f"Instrucción UID: {instr_uid}, Tipo: '{instr_type}'"
)
if unprocessed_count > 0:
print(
f"ADVERTENCIA: Se encontraron {unprocessed_count} instrucciones (no STL) que parecen no haber sido procesadas:"
)
for detail in unprocessed_details:
print(detail)
else:
print(
"INFO: Todas las instrucciones relevantes (no STL) parecen haber sido procesadas o agrupadas."
)
# --- Guardar JSON Final ---
output_filename = json_filepath.replace(
"_simplified.json", "_simplified_processed.json"
)
print(f"\nGuardando JSON procesado ({block_type}) en: {output_filename}") # <-- Mensaje actualizado
try:
with open(output_filename, "w", encoding="utf-8") as f:
json.dump(data, f, indent=4, ensure_ascii=False)
print("Guardado completado.")
except Exception as e:
print(f"Error Crítico al guardar JSON procesado: {e}")
traceback.print_exc()
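# Note on the data flow above: processors communicate through sympy_map, which is
# keyed by (network_id, instruction_uid, pin_name). A hypothetical entry, purely
# for illustration (the ids and symbol name are invented):
#   sympy_map[("N1", "21", "out")] = sympy.Symbol("Start_Button")
# Consumers such as process_group_ifs() look up that key to reuse or simplify the
# boolean expression instead of re-parsing generated SCL text.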
# --- Ejecución (sin cambios en esta parte) ---
if __name__ == "__main__":
# Imports necesarios solo para la ejecución como script principal
import argparse
import os
import sys
# Configurar ArgumentParser para recibir la ruta del XML original obligatoria
parser = argparse.ArgumentParser(
description="Process simplified JSON (_simplified.json) to embed SCL logic (SymPy version). Expects original XML filepath as argument."
)
parser.add_argument(
"source_xml_filepath", # Argumento posicional obligatorio
help="Path to the original source XML file (passed from x0_main.py, used to derive JSON input name).",
)
args = parser.parse_args() # Parsea los argumentos de sys.argv
source_xml_file = args.source_xml_filepath # Obtiene la ruta del XML original
# Verificar si el archivo XML original existe (como referencia, útil para depuración)
if not os.path.exists(source_xml_file):
print(
f"Advertencia (x2): Archivo XML original no encontrado: '{source_xml_file}', pero se intentará encontrar el JSON correspondiente."
)
# Derivar nombre del archivo JSON de entrada (_simplified.json)
xml_filename_base = os.path.splitext(os.path.basename(source_xml_file))[0]
# Asumir que el JSON simplificado está en el mismo directorio que el XML original
input_dir = os.path.dirname(source_xml_file) # Directorio del XML original
input_json_file = os.path.join(input_dir, f"{xml_filename_base}_simplified.json")
# Determinar el nombre esperado del archivo JSON procesado de salida
output_json_file = os.path.join(
input_dir, f"{xml_filename_base}_simplified_processed.json"
)
print(
f"(x2) Procesando: '{os.path.relpath(input_json_file)}' -> '{os.path.relpath(output_json_file)}'"
)
# Verificar si el archivo JSON de entrada (_simplified.json) EXISTE antes de procesar
if not os.path.exists(input_json_file):
print(
f"Error Fatal (x2): El archivo de entrada JSON simplificado no existe: '{input_json_file}'"
)
print(
f"Asegúrate de que 'x1_to_json.py' se ejecutó correctamente para '{os.path.relpath(source_xml_file)}'."
)
sys.exit(1) # Salir si el archivo necesario no está
else:
# Llamar a la función principal de procesamiento del script
try:
process_json_to_scl(input_json_file)
except Exception as e:
print(
f"Error Crítico (x2) durante el procesamiento de '{input_json_file}': {e}"
)
import traceback # Asegurar que traceback está importado
traceback.print_exc()
sys.exit(1) # Salir con error si la función principal falla

View File

@ -1,807 +0,0 @@
# x3_generate_scl.py
# -*- coding: utf-8 -*-
import json
import os
import re
import argparse
import sys
import traceback # Importar traceback para errores
# --- Importar Utilidades y Constantes (Asumiendo ubicación) ---
try:
# Intenta importar desde el paquete de procesadores si está estructurado así
from processors.processor_utils import format_variable_name
# Definir SCL_SUFFIX aquí o importarlo si está centralizado
SCL_SUFFIX = "_sympy_processed" # Asegúrate que coincida con x2_process.py
GROUPED_COMMENT = (
"// Logic included in grouped IF" # Opcional, si se usa para filtrar
)
except ImportError:
print(
"Advertencia: No se pudo importar 'format_variable_name' desde processors.processor_utils."
)
print(
"Usando una implementación local básica (¡PUEDE FALLAR CON NOMBRES COMPLEJOS!)."
)
# Implementación local BÁSICA como fallback (MENOS RECOMENDADA)
def format_variable_name(name):
if not name:
return "_INVALID_NAME_"
if name.startswith('"') and name.endswith('"'):
return name # Mantener comillas
prefix = "#" if name.startswith("#") else ""
if prefix:
name = name[1:]
if name and name[0].isdigit():
name = "_" + name
name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
return prefix + name
SCL_SUFFIX = "_sympy_processed"
GROUPED_COMMENT = "// Logic included in grouped IF"
# para formatear valores iniciales
def format_scl_start_value(value, datatype):
"""Formatea un valor para la inicialización SCL según el tipo."""
# Add initial debug print
# print(f"DEBUG format_scl_start_value: value='{value}', datatype='{datatype}'")
if value is None:
return None # Retornar None si no hay valor
datatype_lower = datatype.lower() if datatype else ""
value_str = str(value)
# Intentar quitar comillas si existen (para manejar "TRUE" vs TRUE)
if value_str.startswith('"') and value_str.endswith('"') and len(value_str) > 1:
value_str_unquoted = value_str[1:-1]
elif value_str.startswith("'") and value_str.endswith("'") and len(value_str) > 1:
value_str_unquoted = value_str[1:-1]
else:
value_str_unquoted = value_str
# --- Integer-like types ---
if any(
t in datatype_lower
for t in [
"int",
"byte",
"word",
"dint",
"dword",
"lint",
"lword",
"sint",
"usint",
"uint",
"udint",
"ulint",
]
):
try:
# Intentar convertir el valor (sin comillas) a entero
return str(int(value_str_unquoted))
except ValueError:
# Si no es un entero válido, podría ser una constante simbólica
if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", value_str_unquoted):
return value_str_unquoted # Devolver como símbolo
# --- Fallback for non-integer, non-symbol ---
print(
f"DEBUG format_scl_start_value: Fallback for int-like. value_str_unquoted='{repr(value_str_unquoted)}', datatype='{datatype}'"
) # More debug
# MODIFIED FALLBACK: Escape newlines and use repr() for safety before formatting
try:
# Escape backslashes and single quotes properly for SCL string literal
escaped_for_scl = value_str_unquoted.replace("\\", "\\\\").replace(
"'", "''"
)
# Remove potential newlines that break Python f-string; SCL strings usually don't span lines implicitly
escaped_for_scl = escaped_for_scl.replace("\n", "").replace("\r", "")
# Format as SCL string literal
formatted_scl_string = f"'{escaped_for_scl}'"
print(
f"DEBUG format_scl_start_value: Fallback result='{formatted_scl_string}'"
)
return formatted_scl_string
except Exception as format_exc:
print(
f"ERROR format_scl_start_value: Exception during fallback formatting: {format_exc}"
)
return f"'ERROR_FORMATTING_{value_str_unquoted[:20]}'" # Return an error string
# --- Other types (Bool, Real, String, Char, Time, Date, etc.) ---
elif "bool" in datatype_lower:
# Comparar sin importar mayúsculas/minúsculas y sin comillas
return "TRUE" if value_str_unquoted.lower() == "true" else "FALSE"
elif "string" in datatype_lower:
# Usar el valor sin comillas originales y escapar las internas
escaped_value = value_str_unquoted.replace("'", "''")
return f"'{escaped_value}'"
elif "char" in datatype_lower:
# Usar el valor sin comillas originales y escapar las internas
escaped_value = value_str_unquoted.replace("'", "''")
# SCL usa comillas simples para Char. Asegurar que sea un solo caracter si es posible?
# Por ahora, solo formatear. Longitud se verifica en TIA.
return f"'{escaped_value}'"
elif "real" in datatype_lower or "lreal" in datatype_lower:
try:
# Intentar convertir a float
f_val = float(value_str_unquoted)
s_val = str(f_val)
# Asegurar que tenga punto decimal si es entero
if "." not in s_val and "e" not in s_val.lower():
s_val += ".0"
return s_val
except ValueError:
# Podría ser constante simbólica
if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", value_str_unquoted):
return value_str_unquoted
print(
f"Advertencia: Valor '{value_str}' no reconocido como real o símbolo para tipo {datatype}. Devolviendo como string."
)
# Use the robust fallback formatting here too
escaped_for_scl = (
value_str_unquoted.replace("\\", "\\\\")
.replace("'", "''")
.replace("\n", "")
.replace("\r", "")
)
return f"'{escaped_for_scl}'"
elif "time" in datatype_lower:
# Quitar prefijos y añadir el correcto según el tipo específico
prefix = ""
val_to_use = value_str_unquoted # Usar valor sin comillas
if val_to_use.upper().startswith("T#"):
prefix = "T#"
val_to_use = val_to_use[2:]
elif val_to_use.upper().startswith("LT#"):
prefix = "LT#"
val_to_use = val_to_use[3:]
elif val_to_use.upper().startswith("S5T#"):
prefix = "S5T#"
val_to_use = val_to_use[4:]
if "s5time" in datatype_lower:
return f"S5T#{val_to_use}"
elif "ltime" in datatype_lower:
return f"LT#{val_to_use}"
else:
return f"T#{val_to_use}" # Default a TIME
elif "date" in datatype_lower:
val_to_use = value_str_unquoted
# Handle DTL first as it's longer
if "dtl" in datatype_lower or "date_and_time" in datatype_lower:
prefix = "DTL#" if val_to_use.upper().startswith("DTL#") else "DTL#"
val_to_use = (
val_to_use[4:] if val_to_use.upper().startswith("DTL#") else val_to_use
)
return f"{prefix}{val_to_use}"
elif "dt" in datatype_lower:
prefix = "DT#" if val_to_use.upper().startswith("DT#") else "DT#"
val_to_use = (
val_to_use[3:] if val_to_use.upper().startswith("DT#") else val_to_use
)
return f"{prefix}{val_to_use}"
elif "tod" in datatype_lower or "time_of_day" in datatype_lower:
prefix = "TOD#" if val_to_use.upper().startswith("TOD#") else "TOD#"
val_to_use = (
val_to_use[4:] if val_to_use.upper().startswith("TOD#") else val_to_use
)
return f"{prefix}{val_to_use}"
else: # Default a Date D#
prefix = "D#" if val_to_use.upper().startswith("D#") else "D#"
val_to_use = (
val_to_use[2:] if val_to_use.upper().startswith("D#") else val_to_use
)
return f"{prefix}{val_to_use}"
# --- Fallback for completely unknown types or complex structures ---
else:
# Si es un nombre válido (posiblemente UDT, constante global, etc.), devolverlo tal cual
# Ajustar regex para permitir más caracteres si es necesario
if re.match(
r'^[a-zA-Z_#"][a-zA-Z0-9_."#\[\]%]+$', value_str
): # Permitir % para accesos tipo %DB1.DBD0
# Quitar comillas externas si es un UDT o struct complejo
if (
value_str.startswith('"')
and value_str.endswith('"')
and len(value_str) > 1
):
return value_str[1:-1]
# Mantener comillas si es acceso a DB ("DB_Name".Var)
if '"' in value_str and "." in value_str and value_str.count('"') == 2:
return value_str
# Si no tiene comillas y es un nombre simple o acceso #temp o %I0.0 etc
if not value_str.startswith('"') and not value_str.startswith("'"):
# Formatear nombres simples, pero dejar accesos % y # tal cual
if value_str.startswith("#") or value_str.startswith("%"):
return value_str
else:
# return format_variable_name(value_str) # Evitar formatear aquí, puede ser una constante
return value_str # Return as is if it looks symbolic
# Devolver el valor original si tiene comillas internas o estructura compleja no manejada arriba
return value_str
else:
# Si no parece un nombre/símbolo/acceso, tratarlo como string (último recurso)
print(
f"DEBUG format_scl_start_value: Fallback final. value_str_unquoted='{repr(value_str_unquoted)}', datatype='{datatype}'"
)
# Use the robust fallback formatting
escaped_for_scl = (
value_str_unquoted.replace("\\", "\\\\")
.replace("'", "''")
.replace("\n", "")
.replace("\r", "")
)
return f"'{escaped_for_scl}'"
# --- Funciones generate_scl_declarations y generate_scl ---
# --- NUEVA FUNCIÓN RECURSIVA para generar declaraciones SCL (VAR/STRUCT/ARRAY) ---
def generate_scl_declarations(variables, indent_level=1):
"""Genera las líneas SCL para declarar variables, structs y arrays."""
scl_lines = []
indent = " " * indent_level
for var in variables:
var_name_scl = format_variable_name(var.get("name"))
var_dtype_raw = var.get("datatype", "VARIANT")
var_comment = var.get("comment")
start_value = var.get("start_value")
children = var.get("children") # Para structs
array_elements = var.get("array_elements") # Para arrays
# Limpiar comillas del tipo de dato si es UDT/String/etc.
var_dtype_cleaned = var_dtype_raw
if isinstance(var_dtype_raw, str):
if var_dtype_raw.startswith('"') and var_dtype_raw.endswith('"'):
var_dtype_cleaned = var_dtype_raw[1:-1]
# Manejar caso 'Array [...] of "MyUDT"'
array_match = re.match(
r'(Array\[.*\]\s+of\s+)"(.*)"', var_dtype_raw, re.IGNORECASE
)
if array_match:
var_dtype_cleaned = f"{array_match.group(1)}{array_match.group(2)}" # Quitar comillas del tipo base
# Determinar tipo base para inicialización (importante para arrays)
base_type_for_init = var_dtype_cleaned
array_prefix_for_decl = ""
if var_dtype_cleaned.lower().startswith("array["):
match = re.match(
r"(Array\[.*\]\s+of\s+)(.*)", var_dtype_cleaned, re.IGNORECASE
)
if match:
array_prefix_for_decl = match.group(1)
base_type_for_init = match.group(2).strip()
# Construir tipo de dato para la declaración SCL
declaration_dtype = var_dtype_raw # Usar el raw por defecto
# Si es UDT o tipo complejo que requiere comillas y no es array simple
if base_type_for_init != var_dtype_cleaned and not array_prefix_for_decl:
# Poner comillas si no las tiene ya el tipo base
if not base_type_for_init.startswith('"'):
declaration_dtype = f'"{base_type_for_init}"'
else:
declaration_dtype = base_type_for_init # Ya tiene comillas
# Si es array de UDT/complejo, reconstruir con comillas en el tipo base
elif array_prefix_for_decl and base_type_for_init != var_dtype_cleaned:
if not base_type_for_init.startswith('"'):
declaration_dtype = f'{array_prefix_for_decl}"{base_type_for_init}"'
else:
declaration_dtype = f"{array_prefix_for_decl}{base_type_for_init}"
declaration_line = f"{indent}{var_name_scl} : {declaration_dtype}"
init_value_scl = None
# ---- Arrays ----
if array_elements:
# Ordenar índices (asumiendo que son numéricos '0', '1', ...)
try:
# Extraer números de los índices string
indices_numeric = {int(k): v for k, v in array_elements.items()}
sorted_indices = sorted(indices_numeric.keys())
# Mapear de nuevo a string para buscar valor
sorted_indices_str = [str(k) for k in sorted_indices]
except ValueError:
# Fallback a orden alfabético si los índices no son números
print(
f"Advertencia: Índices de array no numéricos para '{var_name_scl}'. Usando orden alfabético."
)
sorted_indices_str = sorted(array_elements.keys())
init_values = []
for idx_str in sorted_indices_str:
try:
formatted_val = format_scl_start_value(
array_elements[idx_str], base_type_for_init
)
init_values.append(formatted_val)
except Exception as e_fmt:
print(
f"ERROR: Falló formateo para índice {idx_str} de array '{var_name_scl}'. Valor: {array_elements[idx_str]}. Error: {e_fmt}"
)
init_values.append(f"/*ERR_FMT_{idx_str}*/") # Placeholder de error
# Filtrar Nones que pueden venir de format_scl_start_value si el valor era None
valid_inits = [v for v in init_values if v is not None]
if valid_inits:
# Si todos los valores son iguales y es un array grande, podríamos usar notación x(value)
# Simplificación: por ahora, listar todos
init_value_scl = f"[{', '.join(valid_inits)}]"
elif array_elements: # Si había elementos pero todos formatearon a None
print(
f"Advertencia: Todos los valores iniciales para array '{var_name_scl}' son None o inválidos."
)
# ---- Structs ----
elif children:
# El valor inicial de un struct se maneja recursivamente dentro
# Añadir comentario? Puede ser redundante.
scl_lines.append(
declaration_line
) # Añadir línea de declaración base STRUCT
scl_lines.append(f"{indent}STRUCT")
# Llamada recursiva para los miembros internos
scl_lines.extend(generate_scl_declarations(children, indent_level + 1))
scl_lines.append(f"{indent}END_STRUCT;")
if var_comment: # Comentario después de END_STRUCT
scl_lines.append(f"{indent}// {var_comment}")
scl_lines.append("") # Línea extra para legibilidad
continue # Saltar el resto de la lógica para este struct
# ---- Tipos Simples ----
else:
if start_value is not None:
try:
init_value_scl = format_scl_start_value(
start_value, base_type_for_init
) # Usar tipo base
except Exception as e_fmt_simple:
print(
f"ERROR: Falló formateo para valor simple de '{var_name_scl}'. Valor: {start_value}. Error: {e_fmt_simple}"
)
init_value_scl = f"/*ERR_FMT_SIMPLE*/" # Placeholder
# Añadir inicialización si existe y no es None
if init_value_scl is not None:
declaration_line += f" := {init_value_scl}"
declaration_line += ";"
# Añadir comentario si existe
if var_comment:
declaration_line += f" // {var_comment}"
scl_lines.append(declaration_line)
return scl_lines
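# Illustrative call (the variable entry below is hypothetical, not taken from a
# real block) showing the kind of declaration line the function above emits:
def _example_declaration():
    var = {"name": "Counter", "datatype": "Int", "start_value": "5", "comment": "demo"}
    return generate_scl_declarations([var], indent_level=1)
    # -> one line similar to: "Counter : Int := 5; // demo" (prefixed by the indent)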
# --- Función Principal de Generación SCL ---
def generate_scl(processed_json_filepath, output_scl_filepath):
"""Genera un archivo SCL a partir del JSON procesado (FC/FB/OB o DB).""" # Actualizado
if not os.path.exists(processed_json_filepath):
print(
f"Error: Archivo JSON procesado no encontrado en '{processed_json_filepath}'"
)
return
print(f"Cargando JSON procesado desde: {processed_json_filepath}")
try:
with open(processed_json_filepath, "r", encoding="utf-8") as f:
data = json.load(f)
except Exception as e:
print(f"Error al cargar o parsear JSON: {e}")
traceback.print_exc()
return
# --- Extracción de Información del Bloque (Común) ---
block_name = data.get("block_name", "UnknownBlock")
block_number = data.get("block_number")
# block_lang_original = data.get("language", "Unknown") # Lenguaje original (SCL, LAD, DB...)
block_type = data.get(
"block_type", "Unknown"
) # Tipo de bloque (FC, FB, GlobalDB, OB) <-- Usar este
block_comment = data.get("block_comment", "")
scl_block_name = format_variable_name(block_name) # Nombre SCL seguro
print(
f"Generando SCL para: {block_type} '{scl_block_name}' (Original: {block_name})" # Quitado lenguaje original del log
)
scl_output = []
# --- MODIFICADO: GENERACIÓN PARA DATA BLOCK (GlobalDB) ---
if block_type == "GlobalDB": # <-- Comprobar tipo de bloque
print("Modo de generación: DATA_BLOCK")
scl_output.append(f"// Block Type: {block_type}")
scl_output.append(f"// Block Name (Original): {block_name}")
if block_number:
scl_output.append(f"// Block Number: {block_number}")
if block_comment:
# Dividir comentarios largos en múltiples líneas
comment_lines = block_comment.splitlines()
scl_output.append(f"// Block Comment:")
for line in comment_lines:
scl_output.append(f"// {line}")
scl_output.append("")
scl_output.append(f'DATA_BLOCK "{scl_block_name}"')
scl_output.append("{ S7_Optimized_Access := 'TRUE' }") # Asumir optimizado
scl_output.append("VERSION : 0.1")
scl_output.append("")
interface_data = data.get("interface", {})
# En DBs, la sección relevante suele ser 'Static'
static_vars = interface_data.get("Static", [])
if static_vars:
scl_output.append("VAR")
# Usar la función recursiva para generar declaraciones
scl_output.extend(generate_scl_declarations(static_vars, indent_level=1))
scl_output.append("END_VAR")
scl_output.append("")
else:
print(
"Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB."
)
# Añadir bloque VAR vacío si no hay variables
scl_output.append("VAR")
scl_output.append("END_VAR")
scl_output.append("")
scl_output.append("BEGIN")
scl_output.append(
" // Los Data Blocks no tienen código ejecutable en BEGIN/END"
)
scl_output.append("END_DATA_BLOCK")
# --- MODIFICADO: GENERACIÓN PARA FC/FB/OB ---
else:
# Determinar palabra clave SCL
scl_block_keyword = "FUNCTION_BLOCK" # Default
if block_type == "FC":
scl_block_keyword = "FUNCTION"
elif block_type == "OB":
scl_block_keyword = "ORGANIZATION_BLOCK"
elif block_type == "FB":
scl_block_keyword = "FUNCTION_BLOCK"
else: # Fallback
print(
f"Advertencia: Tipo de bloque desconocido '{block_type}', usando FUNCTION_BLOCK."
)
scl_block_keyword = "FUNCTION_BLOCK" # O quizás lanzar error?
print(f"Modo de generación: {scl_block_keyword}")
# Cabecera del Bloque
scl_output.append(f"// Block Type: {block_type}")
scl_output.append(f"// Block Name (Original): {block_name}")
if block_number:
scl_output.append(f"// Block Number: {block_number}")
# Indicar lenguaje original de las redes si es relevante
original_net_langs = set(
n.get("language", "Unknown") for n in data.get("networks", [])
)
scl_output.append(
f"// Original Network Languages: {', '.join(l for l in original_net_langs if l != 'Unknown')}"
)
if block_comment:
comment_lines = block_comment.splitlines()
scl_output.append(f"// Block Comment:")
for line in comment_lines:
scl_output.append(f"// {line}")
scl_output.append("")
# Manejar tipo de retorno para FUNCTION (FC)
return_type = "Void" # Default
interface_data = data.get("interface", {})
if scl_block_keyword == "FUNCTION" and interface_data.get("Return"):
# Asumir un solo valor de retorno
return_member = interface_data["Return"][0]
return_type_raw = return_member.get("datatype", "Void")
# Limpiar comillas si es UDT/String
return_type = (
return_type_raw[1:-1]
if isinstance(return_type_raw, str)
and return_type_raw.startswith('"')
and return_type_raw.endswith('"')
else return_type_raw
)
# Añadir comillas si es UDT y no las tenía
if (
return_type != return_type_raw
and not return_type_raw.lower().startswith("array")
):
return_type = f'"{return_type}"'
else: # Mantener raw si es tipo básico o ya tenía comillas
return_type = return_type_raw
# Línea de declaración del bloque
if scl_block_keyword == "FUNCTION":
scl_output.append(f'{scl_block_keyword} "{scl_block_name}" : {return_type}')
else: # FB y OB
scl_output.append(f'{scl_block_keyword} "{scl_block_name}"')
# Atributos y versión
scl_output.append("{ S7_Optimized_Access := 'TRUE' }") # Asumir optimizado
scl_output.append("VERSION : 0.1")
scl_output.append("")
# Declaraciones de Interfaz (Input, Output, InOut, Static, Temp, Constant)
# Orden estándar SCL
section_order = ["Input", "Output", "InOut", "Static", "Temp", "Constant"]
declared_temps = set() # Para rastrear temps ya declaradas
has_declarations = False
for section_name in section_order:
vars_in_section = interface_data.get(section_name, [])
if vars_in_section:
has_declarations = True
# Mapeo de nombres de sección JSON a palabras clave SCL VAR_
scl_section_keyword = f"VAR_{section_name.upper()}"
if section_name == "Static":
scl_section_keyword = "VAR_STAT" # Para FBs
if section_name == "Temp":
scl_section_keyword = "VAR_TEMP"
if section_name == "Constant":
scl_section_keyword = "CONSTANT" # CONSTANT no usa VAR_
scl_output.append(scl_section_keyword)
# Usar la función recursiva para generar declaraciones
scl_output.extend(
generate_scl_declarations(vars_in_section, indent_level=1)
)
# Añadir END_VAR (o END_CONSTANT)
scl_output.append(
"END_VAR" if section_name != "Constant" else "END_CONSTANT"
)
scl_output.append("") # Línea en blanco
# Guardar nombres de Temp declarados explícitamente
if section_name == "Temp":
declared_temps.update(
format_variable_name(v.get("name"))
for v in vars_in_section
if v.get("name")
)
# Declaraciones VAR_TEMP adicionales (auto-detectadas)
# Buscar variables que empiecen con #_temp_ en el SCL generado
temp_vars_detected = set()
# Patrón para encontrar #variable o "#variable"
temp_pattern = re.compile(
r'"?(#\w+)"?'
) # Busca # seguido de caracteres alfanuméricos
for network in data.get("networks", []):
for instruction in network.get("logic", []):
# Revisar el SCL final y el SCL de actualización de memoria si existe
scl_code = instruction.get("scl", "")
edge_update_code = instruction.get(
"_edge_mem_update_scl", ""
) # Para flancos
code_to_scan = (
(scl_code if scl_code else "")
+ "\n"
+ (edge_update_code if edge_update_code else "")
)
if code_to_scan:
# Usar findall para encontrar todas las ocurrencias
found_temps = temp_pattern.findall(code_to_scan)
for temp_name in found_temps:
# findall devuelve el grupo capturado (#...)
if temp_name:
temp_vars_detected.add(temp_name)
# Filtrar las que ya estaban declaradas
additional_temps = sorted(list(temp_vars_detected - declared_temps))
if additional_temps:
print(f"INFO: Detectadas {len(additional_temps)} VAR_TEMP adicionales.")
# Si no se declaró la sección Temp antes, añadirla ahora
if "Temp" not in interface_data or not interface_data["Temp"]:
scl_output.append("VAR_TEMP")
for temp_name in additional_temps:
# Formatear por si acaso, aunque el patrón ya debería dar #nombre
scl_name = format_variable_name(temp_name)
# Inferir tipo (Bool es lo más común para temporales internos)
# Se podría mejorar si el nombre da pistas (ej. _temp_r para Real)
inferred_type = "Bool" # Asumir Bool por defecto
scl_output.append(
f" {scl_name} : {inferred_type}; // Auto-generated temporary"
)
# Si abrimos la sección aquí, cerrarla
if "Temp" not in interface_data or not interface_data["Temp"]:
scl_output.append("END_VAR")
scl_output.append("")
# --- Cuerpo del Bloque (BEGIN...END) ---
scl_output.append("BEGIN")
scl_output.append("")
# Iterar por redes y lógica (incluyendo manejo STL/SCL crudo)
for i, network in enumerate(data.get("networks", [])):
network_title = network.get(
"title", f'Network {network.get("id", i+1)}'
) # Usar i+1 si falta ID
network_comment = network.get("comment", "")
network_lang = network.get("language", "LAD") # Lenguaje original de la red
scl_output.append(
f" // Network {i+1}: {network_title} (Original Language: {network_lang})"
)
if network_comment:
# Indentar comentarios de red
for line in network_comment.splitlines():
scl_output.append(f" // {line}")
scl_output.append("") # Línea en blanco antes del código de red
network_has_code = False
logic_in_network = network.get("logic", [])
if not logic_in_network:
scl_output.append(f" // Network {i+1} has no logic elements.")
scl_output.append("")
continue
# --- Manejo Especial Redes STL ---
if network_lang == "STL":
# Asumir que la lógica STL está en el primer elemento como RAW_STL_CHUNK
if logic_in_network[0].get("type") == "RAW_STL_CHUNK":
network_has_code = True
raw_stl_code = logic_in_network[0].get(
"stl", "// ERROR: STL code missing"
)
# Incrustar STL como comentario multi-línea o delimitado
scl_output.append(f" // --- BEGIN STL Network {i+1} ---")
# Comentar cada línea STL
for stl_line in raw_stl_code.splitlines():
scl_output.append(f" // {stl_line}")
scl_output.append(f" // --- END STL Network {i+1} ---")
scl_output.append("") # Línea en blanco después
else:
scl_output.append(
f" // ERROR: Contenido STL inesperado en Network {i+1}."
)
scl_output.append("")
# --- Manejo Redes SCL/LAD/FBD procesadas ---
else:
# Iterar por las instrucciones procesadas
for instruction in logic_in_network:
instruction_type = instruction.get("type", "")
scl_code = instruction.get("scl", "")
is_grouped = instruction.get("grouped", False)
# Saltar instrucciones agrupadas (su lógica está en el IF)
if is_grouped:
continue
# Incluir SCL si la instrucción fue procesada o es un chunk crudo/error/placeholder
if (
instruction_type.endswith(SCL_SUFFIX)
or instruction_type
in [
"RAW_SCL_CHUNK",
"UNSUPPORTED_LANG",
"UNSUPPORTED_CONTENT",
"PARSING_ERROR",
]
or "_error" in instruction_type # Incluir errores comentados
) and scl_code:
# Comprobar si el SCL es solo un comentario (a menos que sea un bloque IF)
is_only_comment = all(
line.strip().startswith("//")
for line in scl_code.splitlines()
if line.strip()
)
is_if_block = scl_code.strip().startswith("IF")
# Añadir el SCL indentado si no es solo un comentario (o si es un IF/Error)
if (
not is_only_comment
or is_if_block
or "_error" in instruction_type
or instruction_type
in [
"UNSUPPORTED_LANG",
"UNSUPPORTED_CONTENT",
"PARSING_ERROR",
]
):
network_has_code = True
for line in scl_code.splitlines():
scl_output.append(f" {line}") # Indentar código
# Añadir línea en blanco después de cada bloque SCL para legibilidad
scl_output.append("")
# Si la red no produjo código SCL imprimible (ej. solo lógica interna)
if (
not network_has_code and network_lang != "STL"
): # No añadir para STL ya comentado
scl_output.append(
f" // Network {i+1} did not produce printable SCL code."
)
scl_output.append("")
# Fin del bloque FC/FB/OB
scl_output.append(f"END_{scl_block_keyword}") # <-- Usar keyword determinada
# --- Escritura del Archivo SCL (Común) ---
print(f"Escribiendo archivo SCL en: {output_scl_filepath}")
try:
with open(output_scl_filepath, "w", encoding="utf-8") as f:
for line in scl_output:
f.write(line + "\n")
print("Generación de SCL completada.")
except Exception as e:
print(f"Error al escribir el archivo SCL: {e}")
traceback.print_exc()
# --- Ejecución ---
if __name__ == "__main__":
# Imports necesarios solo para la ejecución como script principal
import argparse
import os
import sys
import traceback # Asegurarse que traceback está importado
# Configurar ArgumentParser para recibir la ruta del XML original obligatoria
parser = argparse.ArgumentParser(
description="Generate final SCL file from processed JSON (_simplified_processed.json). Expects original XML filepath as argument."
)
parser.add_argument(
"source_xml_filepath", # Argumento posicional obligatorio
help="Path to the original source XML file (passed from x0_main.py, used to derive input/output names).",
)
args = parser.parse_args() # Parsea los argumentos de sys.argv
source_xml_file = args.source_xml_filepath # Obtiene la ruta del XML original
# Verificar si el archivo XML original existe (como referencia)
if not os.path.exists(source_xml_file):
print(
f"Advertencia (x3): Archivo XML original no encontrado: '{source_xml_file}', pero se intentará encontrar el JSON procesado."
)
# Derivar nombres de archivos de entrada (JSON procesado) y salida (SCL)
xml_filename_base = os.path.splitext(os.path.basename(source_xml_file))[0]
# Asumir que los archivos están en el mismo directorio que el XML original
base_dir = os.path.dirname(source_xml_file) # Directorio del XML original
input_json_file = os.path.join(
base_dir, f"{xml_filename_base}_simplified_processed.json"
)
# Cambiar extensión de salida a .scl
output_scl_file = os.path.join(
base_dir, f"{xml_filename_base}_generated.scl" # Cambiado nombre de salida
)
print(
f"(x3) Generando SCL: '{os.path.relpath(input_json_file)}' -> '{os.path.relpath(output_scl_file)}'"
)
# Verificar si el archivo JSON procesado de entrada EXISTE
if not os.path.exists(input_json_file):
print(
f"Error Fatal (x3): Archivo JSON procesado no encontrado: '{input_json_file}'"
)
print(
f"Asegúrate de que 'x2_process.py' se ejecutó correctamente para '{os.path.relpath(source_xml_file)}'."
)
sys.exit(1) # Salir si el archivo necesario no está
else:
# Llamar a la función principal de generación SCL del script
try:
generate_scl(input_json_file, output_scl_file)
sys.exit(0) # Salir con éxito explícitamente
except Exception as e:
print(
f"Error Crítico (x3) durante la generación de SCL desde '{input_json_file}': {e}"
)
# traceback ya debería estar importado
traceback.print_exc()
sys.exit(1) # Salir con error si la función principal falla