diff --git a/__pycache__/config_manager.cpython-312.pyc b/__pycache__/config_manager.cpython-312.pyc index 263e6e1..058fdb0 100644 Binary files a/__pycache__/config_manager.cpython-312.pyc and b/__pycache__/config_manager.cpython-312.pyc differ diff --git a/backend/script_groups/ObtainIOFromProjectTia/log_x2.txt b/backend/script_groups/ObtainIOFromProjectTia/log_x2.txt new file mode 100644 index 0000000..e1a831a --- /dev/null +++ b/backend/script_groups/ObtainIOFromProjectTia/log_x2.txt @@ -0,0 +1,38 @@ +--- Log de Ejecución: x2.py --- +Grupo: ObtainIOFromProjectTia +Directorio de Trabajo: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport +Inicio: 2025-05-02 23:34:21 +Fin: 2025-05-02 23:36:20 +Duración: 0:01:58.373747 +Estado: SUCCESS (Código de Salida: 0) + +--- SALIDA ESTÁNDAR (STDOUT) --- +--- TIA Portal Project CAx Exporter and Analyzer --- + +Selected Project: C:/Trabajo/SIDEL/06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)/InLavoro/PLC/SAE196_c0.2/SAE196_c0.2.ap18 +Using Output Directory (Working Directory): C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport +Will export CAx data to: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.aml +Will generate summary to: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Summary.md +Export log file: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.log + +Connecting to TIA Portal V18.0... +2025-05-02 23:34:30,132 [1] INFO Siemens.TiaPortal.OpennessApi18.Implementations.Global OpenPortal - Start TIA Portal, please acknowledge the security dialog. +2025-05-02 23:34:30,155 [1] INFO Siemens.TiaPortal.OpennessApi18.Implementations.Global OpenPortal - With user interface +Connected. +Opening project: SAE196_c0.2.ap18... 
+2025-05-02 23:35:01,950 [1] INFO Siemens.TiaPortal.OpennessApi18.Implementations.Portal OpenProject - Open project... C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\InLavoro\PLC\SAE196_c0.2\SAE196_c0.2.ap18 +Project opened. +Exporting CAx data for the project to C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.aml... +CAx data exported successfully. + +Closing TIA Portal... +2025-05-02 23:36:15,947 [1] INFO Siemens.TiaPortal.OpennessApi18.Implementations.Portal ClosePortal - Close TIA Portal +TIA Portal closed. +Parsing AML file: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.aml +Markdown summary written to: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Summary.md + +Script finished. + +--- ERRORES (STDERR) --- +Ninguno +--- FIN DEL LOG --- diff --git a/backend/script_groups/ObtainIOFromProjectTia/log_x3.txt b/backend/script_groups/ObtainIOFromProjectTia/log_x3.txt new file mode 100644 index 0000000..a12885b --- /dev/null +++ b/backend/script_groups/ObtainIOFromProjectTia/log_x3.txt @@ -0,0 +1,48 @@ +--- Log de Ejecución: x3.py --- +Grupo: ObtainIOFromProjectTia +Directorio de Trabajo: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport +Inicio: 2025-05-02 23:43:07 +Fin: 2025-05-02 23:43:12 +Duración: 0:00:05.235415 +Estado: SUCCESS (Código de Salida: 0) + +--- SALIDA ESTÁNDAR (STDOUT) --- +--- AML (CAx Export) to Hierarchical JSON and Obsidian MD Converter (v28 - Working Directory Integration) --- +Using Working Directory for Output: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport +Input AML: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.aml +Output Directory: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - 
SAE196 (cip integrato)\Reporte\IOExport +Output JSON: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.hierarchical.json +Output Main Tree MD: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export_Hardware_Tree.md +Output IO Debug Tree MD: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export_IO_Upward_Debug.md +Processing AML file: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.aml +Pass 1: Found 203 InternalElement(s). Populating device dictionary... +Pass 2: Identifying PLCs and Networks (Refined v2)... + Identified Network: PROFIBUS_1 (bcc6f2bd-3d71-4407-90f2-bccff6064051) Type: Profibus + Identified Network: ETHERNET_1 (c6d49787-a076-4592-994d-876eea123dfd) Type: Ethernet/Profinet + Identified PLC: PLC (a48e038f-0bcc-4b48-8373-033da316c62b) - Type: CPU 1516F-3 PN/DP OrderNo: 6ES7 516-3FP03-0AB0 +Pass 3: Processing InternalLinks (Robust Network Mapping & IO)... +Found 118 InternalLink(s). 
+ Mapping Device/Node 'E1' (NodeID:1643b51f-7067-4565-8f8e-109a1a775fed, Addr:10.1.33.11) to Network 'ETHERNET_1' + --> Associating Network 'ETHERNET_1' with PLC 'PLC' (via Node 'E1' Addr: 10.1.33.11) + Mapping Device/Node 'P1' (NodeID:5aff409b-2573-485f-82bf-0e08c9200086, Addr:1) to Network 'PROFIBUS_1' + --> Associating Network 'PROFIBUS_1' with PLC 'PLC' (via Node 'P1' Addr: 1) + Mapping Device/Node 'PB1' (NodeID:c796e175-c770-43f0-8191-fc91996c0147, Addr:12) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:0b44f55a-63c1-49e8-beea-24dc5d3226e3, Addr:20) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:25cfc251-f946-40c5-992d-ad6387677acb, Addr:21) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:57999375-ec72-46ef-8ec2-6c3178e8acf8, Addr:22) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:54e8db6a-9443-41a4-a85b-cf0722c1d299, Addr:10) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:4786bab6-4097-4651-ac19-6cadfc7ea735, Addr:8) to Network 'PROFIBUS_1' + Mapping Device/Node 'PB1' (NodeID:1f08afcb-111f-428f-915e-69363af1b09a, Addr:40) to Network 'PROFIBUS_1' +Data extraction and structuring complete. +Generating JSON output: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export.hierarchical.json +JSON data written successfully. + +Markdown summary written to: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export_Hardware_Tree.md + +IO upward debug tree written to: C:\Trabajo\SIDEL\06 - E5.007363 - Modifica O&U - SAE196 (cip integrato)\Reporte\IOExport\SAE196_c0.2_CAx_Export_IO_Upward_Debug.md + +Script finished. 
+ +--- ERRORES (STDERR) --- +Ninguno +--- FIN DEL LOG --- diff --git a/backend/script_groups/ObtainIOFromProjectTia/x3.py b/backend/script_groups/ObtainIOFromProjectTia/x3.py index 833b91d..9233729 100644 --- a/backend/script_groups/ObtainIOFromProjectTia/x3.py +++ b/backend/script_groups/ObtainIOFromProjectTia/x3.py @@ -6,6 +6,7 @@ generar un archivo Markdown con la información. import os import sys +import tkinter as tk from tkinter import filedialog import traceback from lxml import etree as ET @@ -732,9 +733,9 @@ def generate_markdown_tree(project_data, md_file_path): end_byte = start_byte + length_bytes - 1 prefix = "P?" if io_type.lower() == "input": - prefix = "PE" + prefix = "EW" elif io_type.lower() == "output": - prefix = "PA" + prefix = "AW" siemens_addr = f"{prefix} {start_byte}..{end_byte}" except Exception: # Catch any error during calc/format siemens_addr = ( @@ -963,8 +964,8 @@ def select_cax_file(initial_dir=None): # Add initial_dir parameter root = tk.Tk() root.withdraw() file_path = filedialog.askopenfilename( - title="Select CAx Export File (XML)", - filetypes=[("XML Files", "*.xml"), ("AML Files", "*.aml"), ("All Files", "*.*")], # Added AML + title="Select CAx Export File (AML)", + filetypes=[ ("AML Files", "*.aml"), ("All Files", "*.*")], # Added AML initialdir=initial_dir # Set the initial directory ) root.destroy() diff --git a/backend/script_groups/XML Parser to SCL/esquema_group.json b/backend/script_groups/XML Parser to SCL/esquema_group.json new file mode 100644 index 0000000..1c9e43a --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/esquema_group.json @@ -0,0 +1,4 @@ +{ + "type": "object", + "properties": {} +} \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/esquema_work.json b/backend/script_groups/XML Parser to SCL/esquema_work.json new file mode 100644 index 0000000..1c9e43a --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/esquema_work.json @@ -0,0 +1,4 @@ +{ + "type": "object", 
+ "properties": {} +} \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/generators/__init__.py b/backend/script_groups/XML Parser to SCL/generators/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..bcc9ca1 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8d577a7 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-310.pyc new file mode 100644 index 0000000..bb2358f Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-312.pyc new file mode 100644 index 0000000..5af10f6 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_tag_table.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-310.pyc new file mode 100644 index 0000000..dd05bc6 
Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-312.pyc new file mode 100644 index 0000000..77122fa Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_md_udt.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-310.pyc new file mode 100644 index 0000000..b46f218 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-312.pyc new file mode 100644 index 0000000..590b9aa Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_code_block.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-310.pyc new file mode 100644 index 0000000..44c2202 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-312.pyc new file mode 100644 index 0000000..2ef5cc7 Binary files /dev/null and 
b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generate_scl_db.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-310.pyc new file mode 100644 index 0000000..83c2eb2 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-312.pyc new file mode 100644 index 0000000..6ff8125 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/generators/__pycache__/generator_utils.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/generators/generate_md_tag_table.py b/backend/script_groups/XML Parser to SCL/generators/generate_md_tag_table.py new file mode 100644 index 0000000..5106d21 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/generators/generate_md_tag_table.py @@ -0,0 +1,28 @@ +# generators/generate_md_tag_table.py +# -*- coding: utf-8 -*- + +def generate_tag_table_markdown(data): + """Genera contenido Markdown para una tabla de tags.""" + md_lines = [] + table_name = data.get("block_name", "UnknownTagTable") + tags = data.get("tags", []) + + md_lines.append(f"# Tag Table: {table_name}") + md_lines.append("") + + if tags: + md_lines.append("| Name | Datatype | Address | Comment |") + md_lines.append("|---|---|---|---|") + for tag in tags: + name = tag.get("name", "N/A") + datatype = tag.get("datatype", "N/A") + address = tag.get("address", "N/A") or " " + comment_raw = tag.get("comment") + comment = comment_raw.replace('|', '\|').replace('\n', ' ') if comment_raw else "" + md_lines.append(f"| `{name}` | `{datatype}` | `{address}` | {comment} |") + md_lines.append("") + 
else: + md_lines.append("No tags found in this table.") + md_lines.append("") + + return md_lines \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/generators/generate_md_udt.py b/backend/script_groups/XML Parser to SCL/generators/generate_md_udt.py new file mode 100644 index 0000000..55c1a88 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/generators/generate_md_udt.py @@ -0,0 +1,46 @@ +# generators/generate_md_udt.py +# -*- coding: utf-8 -*- +import re +from .generator_utils import format_scl_start_value # Importar utilidad necesaria + +def generate_markdown_member_rows(members, level=0): + """Genera filas Markdown para miembros de UDT (recursivo).""" + md_rows = []; prefix = "    " * level + for member in members: + name = member.get("name", "N/A"); datatype = member.get("datatype", "N/A") + start_value_raw = member.get("start_value") + start_value_fmt = format_scl_start_value(start_value_raw, datatype) if start_value_raw is not None else "" + comment_raw = member.get("comment"); comment = comment_raw.replace('|', '\|').replace('\n', ' ') if comment_raw else "" + md_rows.append(f"| {prefix}`{name}` | `{datatype}` | `{start_value_fmt}` | {comment} |") + children = member.get("children") + if children: md_rows.extend(generate_markdown_member_rows(children, level + 1)) + array_elements = member.get("array_elements") + if array_elements: + base_type_for_init = datatype + if isinstance(datatype, str) and datatype.lower().startswith("array["): + match = re.match(r"(Array\[.*\]\s+of\s+)(.*)", datatype, re.IGNORECASE) + if match: base_type_for_init = match.group(2).strip() + md_rows.append(f"| {prefix}  *(Initial Values)* | | | |") + try: + indices_numeric = {int(k): v for k, v in array_elements.items()} + sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())] + except ValueError: sorted_indices_str = sorted(array_elements.keys()) + for idx_str in sorted_indices_str: + val_raw = array_elements[idx_str] + val_fmt = 
format_scl_start_value(val_raw, base_type_for_init) if val_raw is not None else "" + md_rows.append(f"| {prefix}  `[{idx_str}]` | | `{val_fmt}` | |") + return md_rows + +def generate_udt_markdown(data): + """Genera contenido Markdown para un UDT.""" + md_lines = []; udt_name = data.get("block_name", "UnknownUDT"); udt_comment = data.get("block_comment", "") + md_lines.append(f"# UDT: {udt_name}"); md_lines.append("") + if udt_comment: md_lines.append(f"**Comment:**"); [md_lines.append(f"> {line}") for line in udt_comment.splitlines()]; md_lines.append("") + members = data.get("interface", {}).get("None", []) + if members: + md_lines.append("## Members"); md_lines.append("") + md_lines.append("| Name | Datatype | Start Value | Comment |"); md_lines.append("|---|---|---|---|") + md_lines.extend(generate_markdown_member_rows(members)) + md_lines.append("") + else: md_lines.append("No members found in the UDT interface."); md_lines.append("") + return md_lines \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/generators/generate_scl_code_block.py b/backend/script_groups/XML Parser to SCL/generators/generate_scl_code_block.py new file mode 100644 index 0000000..f5c5a47 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/generators/generate_scl_code_block.py @@ -0,0 +1,285 @@ +# ToUpload/generators/generate_scl_code_block.py +# -*- coding: utf-8 -*- +import re +import os # Importar os +from .generator_utils import format_variable_name, generate_scl_declarations + +SCL_SUFFIX = "_sympy_processed" + + +# ... (_generate_scl_header sin cambios)... 
+def _generate_scl_header(data, scl_block_name): + scl_output = [] + block_type = data.get("block_type", "Unknown") + block_name = data.get("block_name", "UnknownBlock") + block_number = data.get("block_number") + block_comment = data.get("block_comment", "") + scl_block_keyword = "FUNCTION_BLOCK" + if block_type == "FC": + scl_block_keyword = "FUNCTION" + elif block_type == "OB": + scl_block_keyword = "ORGANIZATION_BLOCK" + scl_output.append(f"// Block Type: {block_type}") + if block_name != scl_block_name: + scl_output.append(f"// Block Name (Original): {block_name}") + if block_number: + scl_output.append(f"// Block Number: {block_number}") + original_net_langs = set( + n.get("language", "Unknown") for n in data.get("networks", []) + ) + scl_output.append( + f"// Original Network Languages: {', '.join(l for l in original_net_langs if l != 'Unknown')}" + ) + if block_comment: + scl_output.append(f"// Block Comment:") + [scl_output.append(f"// {line}") for line in block_comment.splitlines()] + scl_output.append("") + if block_type == "FC": + return_type = "Void" + interface_data = data.get("interface", {}) + if interface_data.get("Return"): + return_member = interface_data["Return"][0] + return_type_raw = return_member.get("datatype", "Void") + return_type = ( + return_type_raw[1:-1] + if isinstance(return_type_raw, str) + and return_type_raw.startswith('"') + and return_type_raw.endswith('"') + else return_type_raw + ) + if return_type != return_type_raw and not ( + isinstance(return_type_raw, str) + and return_type_raw.lower().startswith("array") + ): + return_type = f'"{return_type}"' + else: + return_type = return_type_raw + scl_output.append(f'{scl_block_keyword} "{scl_block_name}" : {return_type}') + else: + scl_output.append(f'{scl_block_keyword} "{scl_block_name}"') + scl_output.append("{ S7_Optimized_Access := 'TRUE' }") + scl_output.append("VERSION : 0.1") + scl_output.append("") + return scl_output + + +# Modificar _generate_scl_interface para pasar 
project_root_dir +def _generate_scl_interface(interface_data, project_root_dir): # <-- Nuevo argumento + """Genera las secciones VAR_* de la interfaz SCL para FC/FB/OB.""" + scl_output = [] + section_order = [ + "Input", + "Output", + "InOut", + "Static", + "Temp", + "Constant", + "Return", + ] # Incluir Return + declared_temps = set() # Para _generate_scl_temp_vars + + for section_name in section_order: + vars_in_section = interface_data.get(section_name, []) + if vars_in_section: + scl_section_keyword = f"VAR_{section_name.upper()}" + end_keyword = "END_VAR" + if section_name == "Static": + scl_section_keyword = "VAR_STAT" + if section_name == "Temp": + scl_section_keyword = "VAR_TEMP" + if section_name == "Constant": + scl_section_keyword = "CONSTANT" + end_keyword = "END_CONSTANT" + if section_name == "Return": + scl_section_keyword = "VAR_OUTPUT" + # Retorno va en Output para FB/OB, implícito en FC + + # Para FC, la sección Return no se declara explícitamente aquí + if ( + interface_data.get("parent_block_type") == "FC" + and section_name == "Return" + ): + continue + + scl_output.append(scl_section_keyword) + # Pasar project_root_dir a generate_scl_declarations + scl_output.extend( + generate_scl_declarations( + vars_in_section, indent_level=1, project_root_dir=project_root_dir + ) + ) # <-- Pasar ruta raíz + scl_output.append(end_keyword) + scl_output.append("") + + if section_name == "Temp": + declared_temps.update( + format_variable_name(v.get("name")) + for v in vars_in_section + if v.get("name") + ) + return scl_output, declared_temps + + +# ... (_generate_scl_temp_vars y _generate_scl_body sin cambios) ... 
+def _generate_scl_temp_vars(data, declared_temps): + scl_output = [] + temp_vars_detected = set() + temp_pattern = re.compile(r'"?(#\w+)"?') + for network in data.get("networks", []): + for instruction in network.get("logic", []): + scl_code = instruction.get("scl", "") + edge_update_code = instruction.get("_edge_mem_update_scl", "") + code_to_scan = ( + (scl_code if scl_code else "") + + "\n" + + (edge_update_code if edge_update_code else "") + ) + if code_to_scan: + found_temps = temp_pattern.findall(code_to_scan) + [temp_vars_detected.add(t) for t in found_temps if t] + additional_temps = sorted(list(temp_vars_detected - declared_temps)) + if additional_temps: + print(f"INFO: Detectadas {len(additional_temps)} VAR_TEMP adicionales.") + temp_section_exists = any( + "VAR_TEMP" in s for s in data.get("generated_scl", []) + ) # Check if VAR_TEMP already exists + if not temp_section_exists and not declared_temps: + scl_output.append("VAR_TEMP") # Only add if no temps were declared before + for temp_name in additional_temps: + scl_name = format_variable_name(temp_name) + inferred_type = "Bool" + scl_output.append( + f" {scl_name} : {inferred_type}; // Auto-generated temporary" + ) + if not temp_section_exists and not declared_temps: + scl_output.append("END_VAR") + scl_output.append("") + return scl_output + + +def _generate_scl_body(networks): + scl_output = ["BEGIN", ""] + network_logic_added = False + for i, network in enumerate(networks): + network_title = network.get("title", f'Network {network.get("id", i+1)}') + network_comment = network.get("comment", "") + network_lang = network.get("language", "LAD") + scl_output.append( + f" // Network {i+1}: {network_title} (Original Language: {network_lang})" + ) + if network_comment: + [ + scl_output.append(f" // {line}") + for line in network_comment.splitlines() + ] + scl_output.append("") + network_has_code = False + logic_in_network = network.get("logic", []) + if not logic_in_network: + scl_output.append(f" // 
Network {i+1} has no logic elements.") + scl_output.append("") + continue + if network_lang == "STL": + if logic_in_network and logic_in_network[0].get("type") == "RAW_STL_CHUNK": + network_has_code = True + raw_stl_code = logic_in_network[0].get( + "stl", "// ERROR: STL code missing" + ) + scl_output.append(f" // --- BEGIN STL Network {i+1} ---") + scl_output.append(f" ```stl ") + [ + scl_output.append(f" {stl_line}") # scl_output.append(f" // {stl_line}") + for stl_line in raw_stl_code.splitlines() + ] + scl_output.append(f" ``` ") + scl_output.append(f" // --- END STL Network {i+1} ---") + scl_output.append("") + else: + scl_output.append( + f" // ERROR: Contenido STL inesperado en Network {i+1}." + ) + scl_output.append("") + else: + for instruction in logic_in_network: + instruction_type = instruction.get("type", "") + scl_code = instruction.get("scl", "") + is_grouped = instruction.get("grouped", False) + edge_update_scl = instruction.get("_edge_mem_update_scl", "") + if is_grouped: + continue + code_to_print = [] + if scl_code: + code_to_print.extend(scl_code.splitlines()) + if edge_update_scl: + code_to_print.extend( + edge_update_scl.splitlines() + ) # Append edge update SCL + if code_to_print: + is_only_comment = all( + line.strip().startswith("//") + for line in code_to_print + if line.strip() + ) + is_if_block = any( + line.strip().startswith("IF") for line in code_to_print + ) + if ( + not is_only_comment + or is_if_block + or "_error" in instruction_type + or instruction_type + in [ + "UNSUPPORTED_LANG", + "UNSUPPORTED_CONTENT", + "PARSING_ERROR", + "RAW_SCL_CHUNK", + ] + ): # Print RAW_SCL chunks too + network_has_code = True + [scl_output.append(f" {line}") for line in code_to_print] + scl_output.append("") + if not network_has_code and network_lang != "STL": + scl_output.append(f" // Network {i+1} did not produce printable SCL code.") + scl_output.append("") + if network_has_code: + network_logic_added = True # Mark if any network had code + # Add 
a default comment if no logic was generated at all + if not network_logic_added: + scl_output.append(" // No executable logic generated by script.") + scl_output.append("") + return scl_output + + +# Modificar generate_scl_for_code_block para aceptar y pasar project_root_dir +def generate_scl_for_code_block(data, project_root_dir): # <-- Nuevo argumento + """Genera el contenido SCL completo para un FC/FB/OB.""" + scl_output = [] + block_type = data.get("block_type", "Unknown") + scl_block_name = format_variable_name(data.get("block_name", "UnknownBlock")) + scl_block_keyword = "FUNCTION_BLOCK" # Default for FB + if block_type == "FC": + scl_block_keyword = "FUNCTION" + elif block_type == "OB": + scl_block_keyword = "ORGANIZATION_BLOCK" + + scl_output.extend(_generate_scl_header(data, scl_block_name)) + + interface_data = data.get("interface", {}) + interface_data["parent_block_type"] = block_type # Ayuda a _generate_scl_interface + # Pasar project_root_dir a _generate_scl_interface + interface_lines, declared_temps = _generate_scl_interface( + interface_data, project_root_dir + ) # <-- Pasar ruta raíz + scl_output.extend(interface_lines) + + # Generar VAR_TEMP adicionales (no necesita project_root_dir) + scl_output.extend(_generate_scl_temp_vars(data, declared_temps)) + + # Generar cuerpo (no necesita project_root_dir) + scl_output.extend(_generate_scl_body(data.get("networks", []))) + scl_output.append(f"END_{scl_block_keyword}") + + # Guardar SCL generado en data para _generate_scl_temp_vars + data["generated_scl"] = scl_output + + return scl_output diff --git a/backend/script_groups/XML Parser to SCL/generators/generate_scl_db.py b/backend/script_groups/XML Parser to SCL/generators/generate_scl_db.py new file mode 100644 index 0000000..595f64a --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/generators/generate_scl_db.py @@ -0,0 +1,54 @@ +# ToUpload/generators/generate_scl_db.py +# -*- coding: utf-8 -*- +# No necesita importar json/os aquí, lo hará 
generate_scl_declarations +from .generator_utils import format_variable_name, generate_scl_declarations + +# Modificar _generate_scl_header si es necesario, pero parece ok +def _generate_scl_header(data, scl_block_name): + # ... (código sin cambios) ... + scl_output = [] + block_type = data.get("block_type", "Unknown") + block_name = data.get("block_name", "UnknownBlock") + block_number = data.get("block_number") + block_comment = data.get("block_comment", "") + scl_output.append(f"// Block Type: {block_type}") + if block_name != scl_block_name: scl_output.append(f"// Block Name (Original): {block_name}") + if block_number: scl_output.append(f"// Block Number: {block_number}") + if block_comment: scl_output.append(f"// Block Comment:"); [scl_output.append(f"// {line}") for line in block_comment.splitlines()] + scl_output.append(""); scl_output.append(f'DATA_BLOCK "{scl_block_name}"'); scl_output.append("{ S7_Optimized_Access := 'TRUE' }") + scl_output.append("VERSION : 0.1"); scl_output.append("") + return scl_output + +# Modificar _generate_scl_interface para pasar project_root_dir +def _generate_scl_interface(interface_data, project_root_dir): # <-- Nuevo argumento + """Genera la sección VAR para DB (basada en 'Static').""" + scl_output = [] + static_vars = interface_data.get("Static", []) + if static_vars: + scl_output.append("VAR") + # Pasar project_root_dir a generate_scl_declarations + scl_output.extend(generate_scl_declarations(static_vars, indent_level=1, project_root_dir=project_root_dir)) # <-- Pasar ruta raíz + scl_output.append("END_VAR") + else: + print("Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB.") + scl_output.append("VAR\nEND_VAR") # Añadir vacío + scl_output.append("") + return scl_output + +# Modificar generate_scl_for_db para aceptar y pasar project_root_dir +def generate_scl_for_db(data, project_root_dir): # <-- Nuevo argumento + """Genera el contenido SCL completo para un DATA_BLOCK.""" + scl_output = [] + 
scl_block_name = format_variable_name(data.get("block_name", "UnknownDB")) + + scl_output.extend(_generate_scl_header(data, scl_block_name)) + + interface_data = data.get("interface", {}) + # Pasar project_root_dir a _generate_scl_interface + scl_output.extend(_generate_scl_interface(interface_data, project_root_dir)) # <-- Pasar ruta raíz + + scl_output.append("BEGIN") + scl_output.append(" // Data Blocks have no executable code") + scl_output.append("END_DATA_BLOCK") + + return scl_output \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/generators/generator_utils.py b/backend/script_groups/XML Parser to SCL/generators/generator_utils.py new file mode 100644 index 0000000..5644a58 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/generators/generator_utils.py @@ -0,0 +1,278 @@ +# ToUpload/generators/generator_utils.py +# -*- coding: utf-8 -*- +import re +import os +import json +import traceback # Para depuración si es necesario +import sys + +# --- Importar format_variable_name desde processors --- +try: + # Asumiendo que este script está en 'generators' y 'processors' está al mismo nivel + current_dir = os.path.dirname(os.path.abspath(__file__)) + project_base_dir = os.path.dirname(current_dir) + processors_dir = os.path.join(project_base_dir, 'processors') + if processors_dir not in sys.path: + sys.path.insert(0, processors_dir) # Añadir al path si no está + from processor_utils import format_variable_name +except ImportError: + print("Advertencia: No se pudo importar 'format_variable_name' desde processors.processor_utils.") + print("Usando una implementación local básica.") + def format_variable_name(name): # Fallback + if not name: return "_INVALID_NAME_" + if name.startswith('"') and name.endswith('"'): return name + prefix = "#" if name.startswith("#") else "" + if prefix: name = name[1:] + if name and name[0].isdigit(): name = "_" + name + name = re.sub(r"[^a-zA-Z0-9_]", "_", name) + return prefix + name +# --- Fin 
# --- format_scl_start_value ---
def format_scl_start_value(value, datatype):
    """
    Format a raw parser start value as an SCL literal for *datatype*.

    Args:
        value: raw value from the parsed JSON (str, number, or a dict whose
            'value' key holds the actual value).
        datatype: declared SCL datatype string (may be quoted, an Array, etc.).

    Returns:
        str | None: SCL text for the initializer, or None when the value
        cannot be represented (callers then omit the ':=' clause).
    """
    if value is None:
        return None

    # Unwrap dicts coming from the parser: prefer the inner 'value';
    # otherwise render the whole dict as an SCL comment placeholder.
    if isinstance(value, dict):
        inner = value.get("value")
        if inner is None:
            return f"/* Init: {json.dumps(value)} */"
        value = inner

    datatype_lower = datatype.lower() if isinstance(datatype, str) else ""
    value_str = str(value)
    symbolic = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$")

    # Set of recognized elementary type names (lowercase).
    # BUGFIX: 'time_of_day' and 'date_and_time' added — they are handled by
    # the date/TOD branch below but were previously classified as "complex"
    # and never reached it (generate_scl_declarations' basic_types already
    # lists time_of_day, so this also restores consistency).
    basic_type_names = {
        "bool", "int", "dint", "sint", "usint", "uint", "udint", "lint", "ulint",
        "byte", "word", "dword", "lword", "real", "lreal", "time", "ltime",
        "s5time", "date", "dt", "dtl", "tod", "time_of_day", "date_and_time",
        "string", "char", "wstring", "wchar", "variant",
        "timer", "counter", "iec_timer", "iec_counter", "iec_sfc", "iec_ld_timer",
    }
    is_complex_type = (
        '"' in datatype_lower
        or "array" in datatype_lower
        or "struct" in datatype_lower
        or datatype_lower not in basic_type_names
    )

    if is_complex_type:
        # Complex types only accept symbolic constants or trivial initializers.
        if symbolic.match(value_str):
            return value_str          # symbolic constant
        if value_str == "0":
            return "0"                # numeric zero
        if value_str.lower() == "false":
            return "FALSE"            # boolean false
        if value_str in ("''", ""):
            return "''"               # empty string
        # Other initial values for complex types (incl. JSON array dumps) skipped.
        return None

    # Strip one pair of matching outer quotes, if present.
    value_str_unquoted = value_str
    if len(value_str) > 1 and value_str[0] == value_str[-1] and value_str[0] in "\"'":
        value_str_unquoted = value_str[1:-1]

    # --- Per-type formatting ---
    if any(t in datatype_lower for t in (
            "int", "byte", "word", "dint", "dword", "lint", "lword",
            "sint", "usint", "uint", "udint", "ulint")):
        try:
            return str(int(value_str_unquoted))
        except ValueError:
            # Allow symbolic constants as integer initializers.
            return value_str_unquoted if symbolic.match(value_str_unquoted) else None

    if "bool" in datatype_lower:
        low = value_str_unquoted.lower()
        if low in ("true", "1"):
            return "TRUE"
        if low in ("false", "0"):
            return "FALSE"
        # Symbolic constant, else default to FALSE.
        return value_str_unquoted if symbolic.match(value_str_unquoted) else "FALSE"

    if "string" in datatype_lower or "char" in datatype_lower:
        escaped = value_str_unquoted.replace("'", "''")  # escape single quotes
        if "wstring" in datatype_lower:
            prefix = "WSTRING#"
        elif "wchar" in datatype_lower:
            prefix = "WCHAR#"
        else:
            prefix = ""
        return f"{prefix}'{escaped}'"  # SCL uses single quotes

    if "real" in datatype_lower:  # covers REAL and LREAL
        try:
            s_val = "{:.7g}".format(float(value_str_unquoted))
            # Ensure a decimal point for integral values (SCL real literal).
            if "." not in s_val and "e" not in s_val.lower():
                s_val += ".0"
            return s_val
        except ValueError:
            return value_str_unquoted if symbolic.match(value_str_unquoted) else None

    # BUGFIX: date/DT/TOD types must be checked BEFORE the generic "time"
    # branch, otherwise 'time_of_day'/'date_and_time' (which contain the
    # substring "time") would be misclassified as TIME and rejected.
    if any(t in datatype_lower for t in ("date", "dtl", "dt", "tod", "time_of_day")):
        val_to_use = value_str_unquoted
        m = re.match(r"^(DTL#|D#|DT#|TOD#)(.*)", val_to_use, re.IGNORECASE)
        if m:
            _, val_to_use = m.groups()  # drop any existing literal prefix
        if "dtl" in datatype_lower or "date_and_time" in datatype_lower:
            target_prefix = "DTL#"
        elif "dt" in datatype_lower:
            target_prefix = "DT#"
        elif "tod" in datatype_lower or "time_of_day" in datatype_lower:
            target_prefix = "TOD#"
        else:
            target_prefix = "D#"
        # Simplified format validation: date[-time] or time-of-day.
        if (re.match(r"^\d{4}-\d{2}-\d{2}(-\d{2}:\d{2}:\d{2}(\.\d+)?)?$", val_to_use)
                or re.match(r"^\d{2}:\d{2}:\d{2}(\.\d+)?$", val_to_use)):
            return f"{target_prefix}{val_to_use}"
        if symbolic.match(value_str_unquoted):
            return value_str_unquoted  # symbolic constant
        return None  # invalid format

    if "time" in datatype_lower:  # TIME, LTIME, S5TIME
        val_to_use = value_str_unquoted
        m = re.match(r"^(T#|LT#|S5T#)(.*)", val_to_use, re.IGNORECASE)
        if m:
            _, val_to_use = m.groups()  # drop any existing literal prefix
        # BUGFIX: component separators are optional underscores, so plain
        # values like "5s" or "2h" validate (the old pattern demanded "s_").
        if re.match(r"^-?(\d+d_?)?(\d+h_?)?(\d+m_?)?(\d+s_?)?(\d+ms)?$",
                    val_to_use, re.IGNORECASE):
            if "s5time" in datatype_lower:
                target_prefix = "S5T#"
            elif "ltime" in datatype_lower:
                target_prefix = "LT#"
            else:
                target_prefix = "T#"
            return f"{target_prefix}{val_to_use}"
        if symbolic.match(value_str_unquoted):
            return value_str_unquoted  # symbolic constant
        return None  # invalid format

    # Unknown/other basic type: only symbolic constants are allowed.
    return value_str if symbolic.match(value_str) else None


def generate_scl_declarations(variables, indent_level=1, project_root_dir=None):
    """
    Generate SCL declaration lines for *variables*, handling UDTs, FB
    instances (via 'instance_of_name'), arrays and inline STRUCTs.

    Args:
        variables: list of variable dicts produced by the parsers; each may
            carry 'name', 'datatype', 'instance_of_name', 'comment',
            'start_value', 'children' (inline STRUCT) and 'array_elements'.
        indent_level: nesting depth; 4 spaces per level.
        project_root_dir: optional project root used to locate the
            '<Type>_processed.json' definition of referenced UDTs/FBs
            (informational lookup only — emits INFO/WARNING messages).

    Returns:
        list[str]: SCL declaration source lines (no VAR/END_VAR wrappers).
    """
    scl_lines = []
    indent = "    " * indent_level

    # Elementary type names (lowercase) that never need quoting or lookup.
    basic_types = {
        "bool", "int", "dint", "sint", "usint", "uint", "udint", "lint", "ulint",
        "byte", "word", "dword", "lword", "real", "lreal", "time", "ltime",
        "s5time", "date", "dt", "dtl", "tod", "time_of_day",
        "char", "wchar", "variant",
        "timer", "counter", "iec_timer", "iec_counter", "iec_sfc", "iec_ld_timer",
    }
    # Parameterized basic types: STRING / WSTRING with optional length.
    string_pattern = re.compile(r"^(W?STRING)(\[\s*\d+\s*\])?$", re.IGNORECASE)
    array_pattern = re.compile(r"^(Array\[.*\]\s+of\s+)(.*)", re.IGNORECASE)

    for var in variables:
        var_name_scl = format_variable_name(var.get("name"))
        var_dtype_raw = var.get("datatype", "VARIANT")
        instance_of_name = var.get("instance_of_name")  # FB instance type, may be None
        var_comment = var.get("comment")
        start_value_raw = var.get("start_value")
        children = var.get("children")              # nested STRUCT members
        array_elements = var.get("array_elements")  # ARRAY initialization map

        declaration_dtype = var_dtype_raw   # type text used in the declaration
        base_type_for_init = var_dtype_raw  # base type used to format initializers
        is_array = False
        is_struct_inline = bool(children)
        is_potential_udt_or_fb = False      # whether to look up a .json definition
        type_to_check = None                # clean UDT/FB name to look up

        # --- Determine the declared type ---
        if is_struct_inline:
            # Children present: declare as STRUCT ... END_STRUCT.
            declaration_dtype = "STRUCT"
            base_type_for_init = "STRUCT"  # no direct initializer for STRUCT
        elif isinstance(var_dtype_raw, str):
            if instance_of_name:
                # 1. FB instance: use InstanceOfName (quoted) as the type.
                declaration_dtype = f'"{instance_of_name}"'
                base_type_for_init = instance_of_name
                is_potential_udt_or_fb = True
                type_to_check = instance_of_name
            else:
                array_match = array_pattern.match(var_dtype_raw)
                if array_match:
                    # 2. ARRAY[...] OF <base>.
                    is_array = True
                    array_prefix_for_decl = array_match.group(1)
                    base_type_raw = array_match.group(2).strip()
                    base_type_for_init = base_type_raw

                    base_type_clean = (
                        base_type_raw[1:-1]
                        if base_type_raw.startswith('"') and base_type_raw.endswith('"')
                        else base_type_raw
                    )
                    if (base_type_clean.lower() not in basic_types
                            and not string_pattern.match(base_type_clean)):
                        # Base type is a UDT/FB: quote it and mark for lookup.
                        declaration_dtype = f'{array_prefix_for_decl}"{base_type_clean}"'
                        is_potential_udt_or_fb = True
                        type_to_check = base_type_clean
                    else:
                        # Elementary or STRING[N]/CHAR base type: keep as-is.
                        declaration_dtype = f"{array_prefix_for_decl}{base_type_raw}"
                else:
                    # 3. Scalar: UDT, STRING, CHAR or elementary type.
                    base_type_clean = (
                        var_dtype_raw[1:-1]
                        if var_dtype_raw.startswith('"') and var_dtype_raw.endswith('"')
                        else var_dtype_raw
                    )
                    base_type_for_init = base_type_clean
                    if (base_type_clean.lower() not in basic_types
                            and not string_pattern.match(base_type_clean)):
                        # Assume UDT/FB: quote it and mark for lookup.
                        declaration_dtype = f'"{base_type_clean}"'
                        is_potential_udt_or_fb = True
                        type_to_check = base_type_clean
                    else:
                        declaration_dtype = var_dtype_raw  # keep as-is

        # --- Optional lookup of the UDT/FB definition file (informational) ---
        if is_potential_udt_or_fb and type_to_check and project_root_dir:
            type_scl_name = format_variable_name(type_to_check)
            # Search both 'PLC data types' and 'Program blocks' parse outputs.
            possible_paths = [
                os.path.join(project_root_dir, 'PLC data types', 'parsing',
                             f'{type_scl_name}_processed.json'),
                os.path.join(project_root_dir, 'Program blocks', 'parsing',
                             f'{type_scl_name}_processed.json'),
            ]
            found_path = next((p for p in possible_paths if os.path.exists(p)), None)
            if found_path:
                print(f" INFO: Definición '{type_to_check}' localizada en: '{os.path.relpath(found_path, project_root_dir)}'")
            else:
                print(f" WARNING: No se encontró definición para '{type_to_check}'. Se buscó en directorios estándar.")

        # --- Emit the declaration line(s) ---
        declaration_line = f"{indent}{var_name_scl} : {declaration_dtype}"

        if is_struct_inline:
            scl_lines.append(declaration_line)  # "VarName : STRUCT"
            # Recurse into the nested members one level deeper.
            scl_lines.extend(
                generate_scl_declarations(children, indent_level + 1, project_root_dir)
            )
            scl_lines.append(f"{indent}END_STRUCT;")
            if var_comment:
                scl_lines[-1] += f" // {var_comment}"
            scl_lines.append("")  # blank line after the struct
            continue

        # --- Initial value (non-STRUCT only) ---
        init_value_scl = None
        if is_array and array_elements:
            # Order array indices numerically when possible.
            try:
                indices_numeric = {int(k): v for k, v in array_elements.items()}
                sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())]
            except ValueError:
                print(f"Advertencia: Índices array no numéricos para '{var_name_scl}', ordenando como strings.")
                sorted_indices_str = sorted(array_elements.keys())

            init_values = []
            for idx_str in sorted_indices_str:
                val_info = array_elements[idx_str]  # dict or direct value
                formatted_val = format_scl_start_value(val_info, base_type_for_init)
                # Emit a placeholder comment when formatting is unsupported.
                init_values.append(
                    formatted_val if formatted_val is not None
                    else f"/* Array[{idx_str}] unsupported init */"
                )
            if init_values:
                init_value_scl = f"[{', '.join(init_values)}]"
        elif not is_array and not is_struct_inline and start_value_raw is not None:
            init_value_scl = format_scl_start_value(start_value_raw, base_type_for_init)

        if init_value_scl is not None:
            declaration_line += f" := {init_value_scl}"
        declaration_line += ";"
        if var_comment:
            declaration_line += f" // {var_comment}"
        scl_lines.append(declaration_line)

    return scl_lines
b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/interface_parser.cpython-310.pyc new file mode 100644 index 0000000..9f6a483 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/interface_parser.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/network_parser.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/network_parser.cpython-310.pyc new file mode 100644 index 0000000..a914f3e Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/network_parser.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-310.pyc new file mode 100644 index 0000000..a21cabb Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-312.pyc new file mode 100644 index 0000000..6c7f545 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_lad_fbd.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-310.pyc new file mode 100644 index 0000000..305b706 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-312.pyc new file mode 100644 index 0000000..1895929 Binary files /dev/null and 
b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_scl.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-310.pyc new file mode 100644 index 0000000..3f07d46 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-312.pyc new file mode 100644 index 0000000..32b3847 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parse_stl.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-310.pyc new file mode 100644 index 0000000..0f1fdf0 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-312.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-312.pyc new file mode 100644 index 0000000..cb34e06 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/parser_utils.cpython-312.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/scl_parser.cpython-310.pyc b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/scl_parser.cpython-310.pyc new file mode 100644 index 0000000..305f2ee Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/scl_parser.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/__pycache__/stl_parser.cpython-310.pyc 
b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/stl_parser.cpython-310.pyc new file mode 100644 index 0000000..c2384d3 Binary files /dev/null and b/backend/script_groups/XML Parser to SCL/parsers/__pycache__/stl_parser.cpython-310.pyc differ diff --git a/backend/script_groups/XML Parser to SCL/parsers/parse_lad_fbd.py b/backend/script_groups/XML Parser to SCL/parsers/parse_lad_fbd.py new file mode 100644 index 0000000..f3e7ad9 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/parsers/parse_lad_fbd.py @@ -0,0 +1,548 @@ +# ToUpload/parsers/parse_lad_fbd.py +# -*- coding: utf-8 -*- +from lxml import etree +from collections import defaultdict +import copy +import traceback + +# Importar desde las utilidades del parser +from .parser_utils import ( + ns, + parse_access, + parse_part, + parse_call, + get_multilingual_text, +) + +# Sufijo usado en x2 para identificar instrucciones procesadas (útil para EN/ENO) +SCL_SUFFIX = "_sympy_processed" # Asumimos que este es el sufijo de x2 + + +def parse_lad_fbd_network(network_element): + """ + Parsea una red LAD/FBD/GRAPH, extrae lógica y añade conexiones EN/ENO implícitas. + Devuelve un diccionario representando la red para el JSON. + """ + if network_element is None: + return { + "id": "ERROR", + "title": "Invalid Network Element", + "logic": [], + "error": "Input element was None", + } + + network_id = network_element.get("ID") + # Usar get_multilingual_text de utils + title_element = network_element.xpath( + ".//iface:MultilingualText[@CompositionName='Title']", namespaces=ns + ) + network_title = ( + get_multilingual_text(title_element[0]) + if title_element + else f"Network {network_id}" + ) + comment_element = network_element.xpath( + "./ObjectList/MultilingualText[@CompositionName='Comment']", namespaces=ns + ) # OJO: Path relativo a CompileUnit? 
+ if not comment_element: # Intentar path alternativo si el anterior falla + comment_element = network_element.xpath( + ".//MultilingualText[@CompositionName='Comment']", namespaces=ns + ) # Más genérico dentro de la red + network_comment = ( + get_multilingual_text(comment_element[0]) if comment_element else "" + ) + + # --- Determinar Lenguaje (ya que este parser maneja varios) --- + network_lang = "Unknown" + attr_list_net = network_element.xpath("./AttributeList") + if attr_list_net: + lang_node_net = attr_list_net[0].xpath("./ProgrammingLanguage/text()") + if lang_node_net: + network_lang = lang_node_net[0].strip() + + # --- Buscar FlgNet --- + # Buscar NetworkSource y luego FlgNet (ambos usan namespace flg) + network_source_node = network_element.xpath(".//flg:NetworkSource", namespaces=ns) + flgnet = None + if network_source_node: + flgnet_list = network_source_node[0].xpath("./flg:FlgNet", namespaces=ns) + if flgnet_list: + flgnet = flgnet_list[0] + else: # Intentar buscar FlgNet directamente si no hay NetworkSource + flgnet_list = network_element.xpath(".//flg:FlgNet", namespaces=ns) + if flgnet_list: + flgnet = flgnet_list[0] + + if flgnet is None: + return { + "id": network_id, + "title": network_title, + "comment": network_comment, + "language": network_lang, + "logic": [], + "error": "FlgNet not found inside NetworkSource or CompileUnit", + } + + # 1. 
Parse Access, Parts, Calls (usan utils) + access_map = {} + # Corregir XPath para buscar Access dentro de FlgNet/Parts + for acc in flgnet.xpath(".//flg:Parts/flg:Access", namespaces=ns): + acc_info = parse_access(acc) + if acc_info and acc_info.get("uid") and "error" not in acc_info.get("type", ""): + access_map[acc_info["uid"]] = acc_info + elif acc_info: + print( + f"Advertencia: Ignorando Access inválido o con error UID={acc_info.get('uid')} en red {network_id}" + ) + + parts_and_calls_map = {} + # Corregir XPath para buscar Part y Call dentro de FlgNet/Parts + instruction_elements = flgnet.xpath( + ".//flg:Parts/flg:Part | .//flg:Parts/flg:Call", namespaces=ns + ) + for element in instruction_elements: + parsed_info = None + tag_name = etree.QName(element.tag).localname + if tag_name == "Part": + parsed_info = parse_part(element) # Usa utils + elif tag_name == "Call": + parsed_info = parse_call(element) # Usa utils + + if ( + parsed_info + and parsed_info.get("uid") + and "error" not in parsed_info.get("type", "") + ): + parts_and_calls_map[parsed_info["uid"]] = parsed_info + elif parsed_info: + # Si parse_call/parse_part devolvió error, lo guardamos para tener el UID + print( + f"Advertencia: {tag_name} con error UID={parsed_info.get('uid')} en red {network_id}. Error: {parsed_info.get('error')}" + ) + parts_and_calls_map[parsed_info["uid"]] = ( + parsed_info # Guardar aunque tenga error + ) + + # 2. 
Parse Wires (lógica compleja, mantener aquí) + wire_connections = defaultdict(list) # destination -> [source1, source2] + source_connections = defaultdict(list) # source -> [dest1, dest2] + eno_outputs = defaultdict(list) + qname_powerrail = etree.QName(ns["flg"], "Powerrail") + qname_identcon = etree.QName( + ns["flg"], "IdentCon" + ) # Conexión a/desde Access (variable/constante) + qname_namecon = etree.QName( + ns["flg"], "NameCon" + ) # Conexión a/desde Part/Call (pin con nombre) + qname_openbranch = etree.QName( + ns["flg"], "Openbranch" + ) # Rama abierta (normalmente ignorada o tratada como TRUE?) + qname_opencon = etree.QName( + ns["flg"], "OpenCon" + ) # Conexión abierta (pin no conectado) + + # Corregir XPath para buscar Wire dentro de FlgNet/Wires + for wire in flgnet.xpath(".//flg:Wires/flg:Wire", namespaces=ns): + children = wire.getchildren() + if len(children) < 2: + continue # Necesita al menos origen y destino + + source_elem = children[0] + source_uid, source_pin = None, None + + # Determinar origen + if source_elem.tag == qname_powerrail: + source_uid, source_pin = "POWERRAIL", "out" + elif source_elem.tag == qname_identcon: # Origen es una variable/constante + source_uid = source_elem.get("UId") + source_pin = "value" # Salida implícita de un Access + elif source_elem.tag == qname_namecon: # Origen es pin de instrucción + source_uid = source_elem.get("UId") + source_pin = source_elem.get("Name") + elif source_elem.tag == qname_openbranch: + # ¿Cómo manejar OpenBranch como fuente? Podría ser TRUE o una condición OR implícita + source_uid = "OPENBRANCH_" + wire.get( + "UId", "Unknown" + ) # UID único para la rama + source_pin = "out" + print( + f"Advertencia: OpenBranch encontrado como fuente en Wire UID={wire.get('UId')} (Red {network_id}). Tratando como fuente especial." 
+ ) + # No lo añadimos a parts_and_calls_map, get_sympy_representation necesitará manejarlo + # Ignorar OpenCon como fuente (no tiene sentido) + if source_uid is None or source_pin is None: + # print(f"Advertencia: Fuente de wire inválida o no soportada: {source_elem.tag} en Wire UID={wire.get('UId')}") + continue + + source_info = (source_uid, source_pin) + + # Procesar destinos + for dest_elem in children[1:]: + dest_uid, dest_pin = None, None + + if ( + dest_elem.tag == qname_identcon + ): # Destino es una variable/constante (asignación) + dest_uid = dest_elem.get("UId") + dest_pin = "value" # Entrada implícita de un Access + elif dest_elem.tag == qname_namecon: # Destino es pin de instrucción + dest_uid = dest_elem.get("UId") + dest_pin = dest_elem.get("Name") + # Ignorar Powerrail, OpenBranch, OpenCon como destinos válidos de conexión lógica principal + + if dest_uid is not None and dest_pin is not None: + dest_key = (dest_uid, dest_pin) + if source_info not in wire_connections[dest_key]: + wire_connections[dest_key].append(source_info) + + # Mapa inverso: source -> list of destinations + source_key = (source_uid, source_pin) + dest_info = (dest_uid, dest_pin) + if dest_info not in source_connections[source_key]: + source_connections[source_key].append(dest_info) + + # Trackear salidas ENO específicamente si la fuente es una instrucción + if source_pin == "eno" and source_uid in parts_and_calls_map: + if dest_info not in eno_outputs[source_uid]: + eno_outputs[source_uid].append(dest_info) + + # 3. 
Build Initial Logic Structure (incorporando errores) + all_logic_steps = {} + # Lista de tipos funcionales (usados para inferencia EN) + # Estos son los tipos *originales* de las instrucciones + functional_block_types = [ + "Move", + "Add", + "Sub", + "Mul", + "Div", + "Mod", + "Convert", + "Call", # Call ya está aquí + "TON", + "TOF", + "TP", + "CTU", + "CTD", + "CTUD", + "BLKMOV", # Añadidos + "Se", + "Sd", # Estos son tipos LAD que se mapearán a timers SCL + ] + # Lista de generadores RLO (usados para inferencia EN) + rlo_generators = [ + "Contact", + "O", + "Eq", + "Ne", + "Gt", + "Lt", + "Ge", + "Le", + "And", + "Xor", + "PBox", + "NBox", + "Not", + ] + + # Iterar sobre UIDs válidos (los que se pudieron parsear, aunque sea con error) + valid_instruction_uids = list(parts_and_calls_map.keys()) + + for instruction_uid in valid_instruction_uids: + instruction_info = parts_and_calls_map[instruction_uid] + # Hacer copia profunda para no modificar el mapa original + instruction_repr = copy.deepcopy(instruction_info) + instruction_repr["instruction_uid"] = instruction_uid # Asegurar UID + instruction_repr["inputs"] = {} + instruction_repr["outputs"] = {} + + # Si la instrucción ya tuvo un error de parseo, añadirlo aquí + if "error" in instruction_info: + instruction_repr["parsing_error"] = instruction_info["error"] + # No intentar poblar inputs/outputs si el parseo base falló + all_logic_steps[instruction_uid] = instruction_repr + continue + + original_type = instruction_repr.get("type", "") # Tipo de la instrucción + + # --- Poblar Entradas --- + # Lista base de pines posibles (podría obtenerse de XSDs o dinámicamente) + possible_input_pins = set(["en", "in", "in1", "in2", "pre"]) + # Añadir pines dinámicamente basados en el tipo de instrucción + if original_type in ["Contact", "Coil", "SCoil", "RCoil", "SdCoil"]: + possible_input_pins.add("operand") + elif original_type in [ + "Add", + "Sub", + "Mul", + "Div", + "Mod", + "Eq", + "Ne", + "Gt", + "Lt", + "Ge", + 
"Le", + ]: + possible_input_pins.update(["in1", "in2"]) + elif original_type in ["TON", "TOF", "TP"]: + possible_input_pins.update(["IN", "PT"]) # Pines SCL + elif original_type in ["Se", "Sd"]: + possible_input_pins.update(["s", "tv", "timer"]) # Pines LAD + elif original_type in ["CTU", "CTD", "CTUD"]: + possible_input_pins.update(["CU", "CD", "R", "LD", "PV"]) # Pines SCL/LAD + elif original_type in ["PBox", "NBox"]: + possible_input_pins.update( + ["bit", "clk", "in"] + ) # PBox/NBox usa 'in' y 'bit' + elif original_type == "BLKMOV": + possible_input_pins.add("SRCBLK") + elif original_type == "Move": + possible_input_pins.add("in") + elif original_type == "Convert": + possible_input_pins.add("in") + elif original_type == "Call": + # Para Calls, los nombres de los parámetros reales se definen en el XML + # El Xpath busca Parameter DENTRO de CallInfo, que está DENTRO de Call + call_xml_element_list = flgnet.xpath( + f".//flg:Parts/flg:Call[@UId='{instruction_uid}']", namespaces=ns + ) + if call_xml_element_list: + call_xml_element = call_xml_element_list[0] + call_info_node_list = call_xml_element.xpath( + "./flg:CallInfo", namespaces=ns + ) + if call_info_node_list: + call_param_names = call_info_node_list[0].xpath( + "./flg:Parameter/@Name", namespaces=ns + ) + possible_input_pins.update(call_param_names) + # print(f"DEBUG Call UID={instruction_uid}: Params={call_param_names}") + else: # Fallback si no hay namespace (menos probable) + call_info_node_list_no_ns = call_xml_element.xpath("./CallInfo") + if call_info_node_list_no_ns: + possible_input_pins.update( + call_info_node_list_no_ns[0].xpath("./Parameter/@Name") + ) + + # Iterar sobre pines posibles y buscar conexiones + for pin_name in possible_input_pins: + dest_key = (instruction_uid, pin_name) + if dest_key in wire_connections: + sources_list = wire_connections[dest_key] + input_sources_repr = [] + for source_uid, source_pin in sources_list: + source_repr = None + if source_uid == "POWERRAIL": + 
source_repr = {"type": "powerrail"} + elif source_uid.startswith("OPENBRANCH_"): + source_repr = { + "type": "openbranch", + "uid": source_uid, + } # Fuente especial + elif source_uid in access_map: + source_repr = copy.deepcopy(access_map[source_uid]) + elif source_uid in parts_and_calls_map: + source_instr_info = parts_and_calls_map[source_uid] + source_repr = { + "type": "connection", + "source_instruction_type": source_instr_info.get( + "type", "Unknown" + ), # Usar tipo base + "source_instruction_uid": source_uid, + "source_pin": source_pin, + } + else: + # Fuente desconocida (ni Access, ni Part/Call válido) + print( + f"Advertencia: Fuente desconocida UID={source_uid} conectada a {instruction_uid}.{pin_name}" + ) + source_repr = {"type": "unknown_source", "uid": source_uid} + input_sources_repr.append(source_repr) + + # Guardar la representación de la entrada (lista o dict) + instruction_repr["inputs"][pin_name] = ( + input_sources_repr[0] + if len(input_sources_repr) == 1 + else input_sources_repr + ) + + # --- Poblar Salidas (simplificado: solo conexiones a Access) --- + possible_output_pins = set( + [ + "out", + "out1", + "Q", + "q", + "eno", + "RET_VAL", + "DSTBLK", + "rt", + "cv", + "QU", + "QD", + "ET", # Añadir pines de salida estándar SCL + ] + ) + if original_type == "BLKMOV": + possible_output_pins.add("DSTBLK") + if ( + original_type == "Call" + ): # Para Calls, las salidas dependen del bloque llamado + call_xml_element_list = flgnet.xpath( + f".//flg:Parts/flg:Call[@UId='{instruction_uid}']", namespaces=ns + ) + if call_xml_element_list: + call_info_node_list = call_xml_element_list[0].xpath( + "./flg:CallInfo", namespaces=ns + ) + if call_info_node_list: + # Buscar parámetros con Section="Output" o "InOut" o "Return" + output_param_names = call_info_node_list[0].xpath( + "./flg:Parameter[@Section='Output' or @Section='InOut' or @Section='Return']/@Name", + namespaces=ns, + ) + possible_output_pins.update(output_param_names) + + for pin_name in 
possible_output_pins: + source_key = (instruction_uid, pin_name) + if source_key in source_connections: + if pin_name not in instruction_repr["outputs"]: + instruction_repr["outputs"][pin_name] = [] + for dest_uid, dest_pin in source_connections[source_key]: + if ( + dest_uid in access_map + ): # Solo registrar si va a una variable/constante + dest_operand_copy = copy.deepcopy(access_map[dest_uid]) + if ( + dest_operand_copy + not in instruction_repr["outputs"][pin_name] + ): + instruction_repr["outputs"][pin_name].append( + dest_operand_copy + ) + + all_logic_steps[instruction_uid] = instruction_repr + + # 4. Inferencia EN (modificado para usar tipos originales) + processed_blocks_en_inference = set() + try: + # Ordenar UIDs numéricamente si es posible + sorted_uids_for_en = sorted( + all_logic_steps.keys(), + key=lambda x: ( + int(x) if isinstance(x, str) and x.isdigit() else float("inf") + ), + ) + except ValueError: + sorted_uids_for_en = sorted(all_logic_steps.keys()) # Fallback sort + + ordered_logic_list_for_en = [ + all_logic_steps[uid] for uid in sorted_uids_for_en if uid in all_logic_steps + ] + + for i, instruction in enumerate(ordered_logic_list_for_en): + part_uid = instruction["instruction_uid"] + # Usar el tipo original para la lógica de inferencia + part_type_original = ( + instruction.get("type", "").replace(SCL_SUFFIX, "").replace("_error", "") + ) + + # Inferencia solo para tipos funcionales que no tengan EN explícito + if ( + part_type_original in functional_block_types + and "en" not in instruction.get("inputs", {}) + and part_uid not in processed_blocks_en_inference + and "error" not in part_type_original + ): # No inferir para errores + + inferred_en_source = None + # Buscar hacia atrás en la lista ordenada + if i > 0: + for j in range(i - 1, -1, -1): + prev_instr = ordered_logic_list_for_en[j] + if "error" in prev_instr.get("type", ""): + continue # Saltar errores previos + + prev_uid = prev_instr["instruction_uid"] + prev_type_original = ( 
+ prev_instr.get("type", "") + .replace(SCL_SUFFIX, "") + .replace("_error", "") + ) + + if prev_type_original in rlo_generators: # Fuente RLO encontrada + inferred_en_source = { + "type": "connection", + "source_instruction_uid": prev_uid, + "source_instruction_type": prev_type_original, # Tipo original + "source_pin": "out", + } + break # Detener búsqueda + elif ( + prev_type_original in functional_block_types + ): # Bloque funcional previo + # Comprobar si este bloque tiene salida ENO conectada + if (prev_uid, "eno") in source_connections: + inferred_en_source = { + "type": "connection", + "source_instruction_uid": prev_uid, + "source_instruction_type": prev_type_original, # Tipo original + "source_pin": "eno", + } + # Si no tiene ENO conectado, el flujo RLO se detiene aquí + break # Detener búsqueda + elif prev_type_original in [ + "Coil", + "SCoil", + "RCoil", + "SdCoil", + "SetCoil", + "ResetCoil", + ]: + # Bobinas terminan el flujo RLO + break # Detener búsqueda + + # Si no se encontró fuente, conectar a PowerRail + if inferred_en_source is None: + inferred_en_source = {"type": "powerrail"} + + # Actualizar la instrucción EN el diccionario principal + if part_uid in all_logic_steps: + # Asegurar que inputs exista + if "inputs" not in all_logic_steps[part_uid]: + all_logic_steps[part_uid]["inputs"] = {} + all_logic_steps[part_uid]["inputs"]["en"] = inferred_en_source + processed_blocks_en_inference.add(part_uid) + + # 5. Lógica ENO (añadir destinos ENO si existen) + for source_instr_uid, eno_destinations in eno_outputs.items(): + if source_instr_uid in all_logic_steps and "error" not in all_logic_steps[ + source_instr_uid + ].get("type", ""): + all_logic_steps[source_instr_uid]["eno_destinations"] = eno_destinations + + # 6. 
Ordenar y Devolver + final_logic_list = [ + all_logic_steps[uid] for uid in sorted_uids_for_en if uid in all_logic_steps + ] + + return { + "id": network_id, + "title": network_title, + "comment": network_comment, + "language": network_lang, # Lenguaje original de la red + "logic": final_logic_list, + # No añadir 'error' aquí a menos que el parseo completo falle + } + + +# --- Función de Información del Parser --- +def get_parser_info(): + """Devuelve la información para este parser.""" + # Este parser maneja LAD, FBD y GRAPH + return { + "language": ["LAD", "FBD", "GRAPH"], # Lista de lenguajes soportados + "parser_func": parse_lad_fbd_network, # Función a llamar + } diff --git a/backend/script_groups/XML Parser to SCL/parsers/parse_scl.py b/backend/script_groups/XML Parser to SCL/parsers/parse_scl.py new file mode 100644 index 0000000..b88e779 --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/parsers/parse_scl.py @@ -0,0 +1,253 @@ +# ToUpload/parsers/parse_scl.py +# -*- coding: utf-8 -*- +from lxml import etree +import re + +# Importar desde las utilidades del parser +from .parser_utils import ns, get_multilingual_text + +def reconstruct_scl_from_tokens(st_node): + """ + Reconstruye SCL desde , mejorando el manejo de + variables, constantes literales, tokens básicos, espacios y saltos de línea. 
+ """ + if st_node is None: + return "// Error: StructuredText node not found.\n" + + scl_parts = [] + # Usar st:* para obtener todos los elementos hijos dentro del namespace st + children = st_node.xpath("./st:*", namespaces=ns) + + for elem in children: + tag = etree.QName(elem.tag).localname + + if tag == "Token": + scl_parts.append(elem.get("Text", "")) + elif tag == "Blank": + # Añadir espacios solo si es necesario o más de uno + num_spaces = int(elem.get("Num", 1)) + if not scl_parts or not scl_parts[-1].endswith(" "): + scl_parts.append(" " * num_spaces) + elif num_spaces > 1: + scl_parts.append(" " * (num_spaces -1)) + + elif tag == "NewLine": + # Quitar espacios finales antes del salto de línea + if scl_parts: + scl_parts[-1] = scl_parts[-1].rstrip() + scl_parts.append("\n") + elif tag == "Access": + scope = elem.get("Scope") + access_str = f"/*_ERR_Scope_{scope}_*/" # Placeholder + + # --- Variables --- + if scope in [ + "GlobalVariable", "LocalVariable", "TempVariable", "InOutVariable", + "InputVariable", "OutputVariable", "ConstantVariable", + "GlobalConstant", "LocalConstant" # Añadir constantes simbólicas + ]: + symbol_elem = elem.xpath("./st:Symbol", namespaces=ns) + if symbol_elem: + components = symbol_elem[0].xpath("./st:Component", namespaces=ns) + symbol_text_parts = [] + for i, comp in enumerate(components): + name = comp.get("Name", "_ERR_COMP_") + if i > 0: symbol_text_parts.append(".") + + # Check for HasQuotes attribute (adjust namespace if needed) + # El atributo está en el Component o en el Access padre? 
Probar ambos + has_quotes_comp = comp.get("HasQuotes", "false").lower() == "true" # Check directly on Component + has_quotes_access = False + access_parent = comp.xpath("ancestor::st:Access[1]", namespaces=ns) # Get immediate Access parent + if access_parent: + has_quotes_attr = access_parent[0].xpath("./st:BooleanAttribute[@Name='HasQuotes']/text()", namespaces=ns) + has_quotes_access = has_quotes_attr and has_quotes_attr[0].lower() == 'true' + + has_quotes = has_quotes_comp or has_quotes_access + is_temp = name.startswith("#") + + # Apply quotes based on HasQuotes or if it's the first component and not temp + if has_quotes or (i == 0 and not is_temp and '"' not in name): # Avoid double quotes + symbol_text_parts.append(f'"{name}"') + else: + symbol_text_parts.append(name) + + # --- Array Index Access --- + index_access_nodes = comp.xpath("./st:Access", namespaces=ns) + if index_access_nodes: + # Llamada recursiva para cada índice + indices_text = [reconstruct_scl_from_tokens(idx_node) for idx_node in index_access_nodes] + # Limpiar saltos de línea dentro de los corchetes + indices_cleaned = [idx.replace('\n', '').strip() for idx in indices_text] + symbol_text_parts.append(f"[{','.join(indices_cleaned)}]") + + access_str = "".join(symbol_text_parts) + else: + access_str = f"/*_ERR_NO_SYMBOL_IN_{scope}_*/" + + # --- Constantes Literales --- + elif scope == "LiteralConstant": + constant_elem = elem.xpath("./st:Constant", namespaces=ns) + if constant_elem: + val_elem = constant_elem[0].xpath("./st:ConstantValue/text()", namespaces=ns) + type_elem = constant_elem[0].xpath("./st:ConstantType/text()", namespaces=ns) + const_type = type_elem[0].strip().lower() if type_elem and type_elem[0] is not None else "" + const_val = val_elem[0].strip() if val_elem and val_elem[0] is not None else "_ERR_CONSTVAL_" + + # Formatear según tipo + if const_type == "bool": access_str = const_val.upper() + elif const_type.lower() == "string": + replaced_val = const_val.replace("'", "''") 
+ access_str = f"'{replaced_val}'" + elif const_type.lower() == "char": + replaced_val = const_val.replace("'", "''") + access_str = f"'{replaced_val}'" + elif const_type == "wstring": + replaced_val = const_val.replace("'", "''") + access_str = f"WSTRING#'{replaced_val}'" + elif const_type == "wchar": + replaced_val = const_val.replace("'", "''") + access_str = f"WCHAR#'{replaced_val}'" + elif const_type == "time": access_str = f"T#{const_val}" + elif const_type == "ltime": access_str = f"LT#{const_val}" + elif const_type == "s5time": access_str = f"S5T#{const_val}" + elif const_type == "date": access_str = f"D#{const_val}" + elif const_type == "dtl": access_str = f"DTL#{const_val}" + elif const_type == "dt": access_str = f"DT#{const_val}" + elif const_type == "tod": access_str = f"TOD#{const_val}" + elif const_type in ["int", "dint", "sint", "usint", "uint", "udint", "real", "lreal", "word", "dword", "byte"]: + # Añadir .0 para reales si no tienen decimal + if const_type in ["real", "lreal"] and '.' not in const_val and 'e' not in const_val.lower(): + access_str = f"{const_val}.0" + else: + access_str = const_val + else: # Otros tipos (LWORD, etc.) o desconocidos + access_str = const_val + else: + access_str = "/*_ERR_NOCONST_*/" + + # --- Llamadas a Funciones/Bloques (Scope=Call) --- + elif scope == "Call": + call_info_node = elem.xpath("./st:CallInfo", namespaces=ns) + if call_info_node: + ci = call_info_node[0] + call_name = ci.get("Name", "_ERR_CALLNAME_") + call_type = ci.get("BlockType") # FB, FC, etc. 
+ + # Parámetros (están como Access o Token dentro de CallInfo/Parameter) + params = ci.xpath("./st:Parameter", namespaces=ns) + param_parts = [] + for p in params: + p_name = p.get("Name", "_ERR_PARAMNAME_") + # El valor del parámetro está dentro del nodo Parameter + p_value_node = p.xpath("./st:Access | ./st:Token", namespaces=ns) # Buscar Access o Token + p_value_scl = "" + if p_value_node: + p_value_scl = reconstruct_scl_from_tokens(p) # Parsear el contenido del parámetro + p_value_scl = p_value_scl.replace('\n', '').strip() # Limpiar SCL resultante + param_parts.append(f"{p_name} := {p_value_scl}") + + # Manejar FB vs FC + if call_type == "FB": + instance_node = ci.xpath("./st:Instance/st:Component/@Name", namespaces=ns) + if instance_node: + instance_name = f'"{instance_node[0]}"' + access_str = f"{instance_name}({', '.join(param_parts)})" + else: # FB sin instancia? Podría ser STAT + access_str = f'"{call_name}"({", ".join(param_parts)}) (* FB sin instancia explícita? *)' + elif call_type == "FC": + access_str = f'"{call_name}"({", ".join(param_parts)})' + else: # Otros tipos de llamada + access_str = f'"{call_name}"({", ".join(param_parts)}) (* Tipo: {call_type} *)' + else: + access_str = "/*_ERR_NO_CALLINFO_*/" + + # Añadir más scopes si son necesarios (e.g., Address, Label, Reference) + + scl_parts.append(access_str) + + elif tag == "Comment" or tag == "LineComment": + # Usar get_multilingual_text del parser_utils + comment_text = get_multilingual_text(elem) + if tag == "Comment": + scl_parts.append(f"(* {comment_text} *)") + else: + scl_parts.append(f"// {comment_text}") + # Ignorar otros tipos de nodos si no son relevantes para el SCL + + full_scl = "".join(scl_parts) + + # --- Re-indentación Simple --- + output_lines = [] + indent_level = 0 + indent_str = " " # Dos espacios + for line in full_scl.splitlines(): + trimmed_line = line.strip() + if not trimmed_line: + # Mantener líneas vacías? Opcional. 
+ # output_lines.append("") + continue + + # Reducir indentación ANTES de imprimir para END, ELSE, etc. + if trimmed_line.upper().startswith(("END_", "UNTIL", "}")) or \ + trimmed_line.upper() in ["ELSE", "ELSIF"]: + indent_level = max(0, indent_level - 1) + + output_lines.append(indent_str * indent_level + trimmed_line) + + # Aumentar indentación DESPUÉS de imprimir para IF, FOR, etc. + # Ser más específico con las palabras clave que aumentan indentación + # Usar .upper() para ignorar mayúsculas/minúsculas + line_upper = trimmed_line.upper() + if line_upper.endswith(("THEN", "DO", "OF", "{")) or \ + line_upper.startswith(("IF ", "FOR ", "WHILE ", "CASE ", "REPEAT", "STRUCT")) or \ + line_upper == "ELSE": + # Excepción: No indentar después de ELSE IF + if not (line_upper == "ELSE" and "IF" in output_lines[-1].upper()): + indent_level += 1 + + return "\n".join(output_lines) + + +def parse_scl_network(network_element): + """ + Parsea una red SCL extrayendo el código fuente reconstruido. + Devuelve un diccionario representando la red para el JSON. 
+ """ + network_id = network_element.get("ID", "UnknownSCL_ID") + network_lang = "SCL" # Sabemos que es SCL + + # Buscar NetworkSource y luego StructuredText + network_source_node = network_element.xpath(".//flg:NetworkSource", namespaces=ns) + structured_text_node = None + if network_source_node: + structured_text_node_list = network_source_node[0].xpath("./st:StructuredText", namespaces=ns) + if structured_text_node_list: + structured_text_node = structured_text_node_list[0] + + reconstructed_scl = "// SCL extraction failed: StructuredText node not found.\n" + if structured_text_node is not None: + reconstructed_scl = reconstruct_scl_from_tokens(structured_text_node) + + # Crear la estructura de datos para la red + parsed_network_data = { + "id": network_id, + "language": network_lang, + "logic": [ # SCL se guarda como un único bloque lógico + { + "instruction_uid": f"SCL_{network_id}", # UID sintético + "type": "RAW_SCL_CHUNK", # Tipo especial para SCL crudo + "scl": reconstructed_scl, # El código SCL reconstruido + } + ], + # No añadimos error aquí, reconstruct_scl_from_tokens ya incluye comentarios de error + } + return parsed_network_data + +# --- Función de Información del Parser --- +def get_parser_info(): + """Devuelve la información para este parser.""" + return { + 'language': ['SCL'], # Lista de lenguajes soportados + 'parser_func': parse_scl_network # Función a llamar + } \ No newline at end of file diff --git a/backend/script_groups/XML Parser to SCL/parsers/parse_stl.py b/backend/script_groups/XML Parser to SCL/parsers/parse_stl.py new file mode 100644 index 0000000..19c8d2b --- /dev/null +++ b/backend/script_groups/XML Parser to SCL/parsers/parse_stl.py @@ -0,0 +1,526 @@ +# ToUpload/parsers/parse_stl.py +# -*- coding: utf-8 -*- +from lxml import etree +import traceback +import re # Needed for substitutions in get_access_text_stl + +# Importar desde las utilidades del parser +# ns y get_multilingual_text son necesarios +from .parser_utils import ns, 
get_multilingual_text + +# --- Funciones Auxiliares de Reconstrucción STL --- + + +def get_access_text_stl(access_element): + """ + Reconstruye una representación textual simple de un Access en STL. + Intenta manejar los diferentes tipos de acceso definidos en el XSD. + """ + if access_element is None: + return "_ERR_ACCESS_" + + # --- Símbolo (Variable, Constante Simbólica) --- + # Busca dentro del usando el namespace stl + symbol_elem = access_element.xpath("./stl:Symbol", namespaces=ns) + if symbol_elem: + components = symbol_elem[0].xpath("./stl:Component", namespaces=ns) + parts = [] + for i, comp in enumerate(components): + name = comp.get("Name", "_ERR_COMP_") + # Comprobar HasQuotes (puede estar en el Access o Componente, priorizar Componente) + has_quotes_comp = comp.get("HasQuotes", "false").lower() == "true" + has_quotes_access = False + access_parent = comp.xpath("ancestor::stl:Access[1]", namespaces=ns) + if access_parent: + has_quotes_attr = access_parent[0].xpath( + "./stl:BooleanAttribute[@Name='HasQuotes']/text()", namespaces=ns + ) + has_quotes_access = ( + has_quotes_attr and has_quotes_attr[0].lower() == "true" + ) + + has_quotes = has_quotes_comp or has_quotes_access + is_temp = name.startswith("#") + + if i > 0: + parts.append(".") # Separador para estructuras + + # Aplicar comillas si es necesario + if has_quotes or ( + i == 0 and not is_temp and '"' not in name and "." not in name + ): + # Añadir comillas si HasQuotes es true, o si es el primer componente, + # no es temporal, no tiene ya comillas, y no es parte de una DB (ej. DB10.DBX0.0) + parts.append(f'"{name}"') + else: + parts.append(name) + + # Índices de Array (Access anidado dentro de Component) + index_access = comp.xpath("./stl:Access", namespaces=ns) + if index_access: + indices = [get_access_text_stl(ia) for ia in index_access] + # Limpiar índices (quitar saltos de línea, etc.) 
+ indices_cleaned = [idx.replace("\n", "").strip() for idx in indices] + parts.append(f"[{','.join(indices_cleaned)}]") + + return "".join(parts) + + # --- Constante Literal --- + # Busca dentro del usando el namespace stl + constant_elem = access_element.xpath("./stl:Constant", namespaces=ns) + if constant_elem: + # Obtener valor y tipo + val_elem = constant_elem[0].xpath("./stl:ConstantValue/text()", namespaces=ns) + type_elem = constant_elem[0].xpath("./stl:ConstantType/text()", namespaces=ns) + const_type = ( + type_elem[0].strip().lower() + if type_elem and type_elem[0] is not None + else "" + ) + const_val = ( + val_elem[0].strip() + if val_elem and val_elem[0] is not None + else "_ERR_CONST_" + ) + + # Añadir prefijos estándar STL + if const_type == "time": + return f"T#{const_val}" + if const_type == "s5time": + return f"S5T#{const_val}" + if const_type == "date": + return f"D#{const_val}" + if const_type == "dt": + return f"DT#{const_val}" + if const_type == "time_of_day" or const_type == "tod": + return f"TOD#{const_val}" + if const_type == "ltime": + return f"LT#{const_val}" # Añadido LTIME + if const_type == "dtl": + return f"DTL#{const_val}" # Añadido DTL + + # Strings y Chars (Escapar comillas simples internas) + if const_type == "string": + replaced_val = const_val.replace("'", "''") + return f"'{replaced_val}'" + if const_type == "char": + replaced_val = const_val.replace("'", "''") + return f"'{replaced_val}'" + if const_type == "wstring": + replaced_val = const_val.replace("'", "''") + return f"WSTRING#'{replaced_val}'" + if const_type == "wchar": + replaced_val = const_val.replace("'", "''") + return f"WCHAR#'{replaced_val}'" + + # Tipos numéricos con prefijo opcional (Hexadecimal) + if const_val.startswith("16#"): + if const_type == "byte": + return f"B#{const_val}" + if const_type == "word": + return f"W#{const_val}" + if const_type == "dword": + return f"DW#{const_val}" + if const_type == "lword": + return f"LW#{const_val}" # Añadido LWORD + + 
# Formato Real (añadir .0 si es necesario) + if ( + const_type in ["real", "lreal"] + and "." not in const_val + and "e" not in const_val.lower() + ): + # Verificar si es un número antes de añadir .0 + try: + float(const_val) # Intenta convertir a float + return f"{const_val}.0" + except ValueError: + return const_val # No es número, devolver tal cual + # Otros tipos numéricos o desconocidos + return const_val # Valor por defecto + + # --- Etiqueta (Label) --- + # Busca