From 239126bb96a53e73c9c4757b6f818596ae451700 Mon Sep 17 00:00:00 2001
From: Miguel
Date: Fri, 2 May 2025 22:59:35 +0200
Subject: [PATCH] Add TIA Portal export scripts and conversion of the hardware
 configuration to Markdown

---
 __pycache__/config_manager.cpython-310.pyc    |  Bin 15068 -> 15098 bytes
 .../__pycache__/script_utils.cpython-310.pyc  |  Bin 1452 -> 1537 bytes
 .../ObtainIOFromProjectTia/esquema_group.json |    4 +
 .../ObtainIOFromProjectTia/esquema_work.json  |    4 +
 .../ObtainIOFromProjectTia/x1.py              |  277 +++++
 .../ObtainIOFromProjectTia/x2.py              |  269 +++++
 .../ObtainIOFromProjectTia/x3.py              | 1029 +++++++++++++++++
 backend/script_utils.py                       |    8 +-
 config_manager.py                             |    7 +-
 data/log.txt                                  |   17 +-
 10 files changed, 1594 insertions(+), 21 deletions(-)
 create mode 100644 backend/script_groups/ObtainIOFromProjectTia/esquema_group.json
 create mode 100644 backend/script_groups/ObtainIOFromProjectTia/esquema_work.json
 create mode 100644 backend/script_groups/ObtainIOFromProjectTia/x1.py
 create mode 100644 backend/script_groups/ObtainIOFromProjectTia/x2.py
 create mode 100644 backend/script_groups/ObtainIOFromProjectTia/x3.py

diff --git a/__pycache__/config_manager.cpython-310.pyc b/__pycache__/config_manager.cpython-310.pyc
index fe745998f7dcb997c67b40cc363eddfc54611de7..256b5f7c74333fe815354d822e5160d82ff4afeb 100644
Binary files a/__pycache__/config_manager.cpython-310.pyc and b/__pycache__/config_manager.cpython-310.pyc differ
diff --git a/backend/__pycache__/script_utils.cpython-310.pyc b/backend/__pycache__/script_utils.cpython-310.pyc
Binary files a/backend/__pycache__/script_utils.cpython-310.pyc and b/backend/__pycache__/script_utils.cpython-310.pyc differ
diff --git a/backend/script_groups/ObtainIOFromProjectTia/x3.py b/backend/script_groups/ObtainIOFromProjectTia/x3.py
new file mode 100644
+                        print(
+                            f"  --> Associating Network '{data['networks'][network_id]['name']}' with PLC '{plc_object.get('name', 'Unknown PLC')}' (via Node '{node_name_for_log}' Addr: {address})"
+                        )
+                        data["plcs"][potential_plc_id]["connected_networks"][
+                            network_id
+                        ] = address
+                    elif (
+                        plc_object["connected_networks"][network_id] == "N/A"
+                        and address != "N/A"
+                    ):
+                        print(
+                            f"  --> Updating address for Network '{data['networks'][network_id]['name']}' on PLC '{plc_object.get('name', 'Unknown PLC')}' to: {address}"
+                        )
+                        data["plcs"][potential_plc_id]["connected_networks"][
+                            network_id
+                        ] = address
+                else:
+                    print(
+                        f"  Warning: Could not map linked device ID 
{linked_device_id} (from NodeID {device_node_id}) to any known device." + ) + continue + if not network_id: # Generic links + source_id, source_suffix, target_id, target_suffix = ( + side_a_id, + side_a_suffix, + side_b_id, + side_b_suffix, + ) + if ("Channel" in side_b_suffix or "Parameter" in side_b_suffix) and ( + "Channel" not in side_a_suffix and "Parameter" not in side_a_suffix + ): + source_id, source_suffix, target_id, target_suffix = ( + side_b_id, + side_b_suffix, + side_a_id, + side_a_suffix, + ) + if source_id != "N/A" and target_id != "N/A": + if source_id not in data["links_by_source"]: + data["links_by_source"][source_id] = [] + if target_id not in data["links_by_target"]: + data["links_by_target"][target_id] = [] + link_detail = { + "name": link_name, + "source_id": source_id, + "source_suffix": source_suffix, + "target_id": target_id, + "target_suffix": target_suffix, + "source_device_name": data["devices"] + .get(source_id, {}) + .get("name", source_id), + "target_device_name": data["devices"] + .get(target_id, {}) + .get("name", target_id), + } + data["links_by_source"][source_id].append(link_detail) + data["links_by_target"][target_id].append(link_detail) + data["connections"].append(link_detail) + print("Data extraction and structuring complete.") + return data + + +# --- Helper Function for Recursive IO Search (Unchanged from v20) --- +def find_io_recursively(device_id, project_data): + """Recursively finds all IO addresses under a given device ID.""" + io_list = [] + device_info = project_data.get("devices", {}).get(device_id) + if not device_info: + return io_list + if device_info.get("io_addresses"): + for addr in device_info["io_addresses"]: + io_list.append( + { + "module_name": device_info.get("name", device_id), + "module_pos": device_info.get("position", "N/A"), + **addr, + } + ) + children_ids = device_info.get("children_ids", []) + for child_id in children_ids: + if child_id != device_id: # Basic loop prevention + io_list.extend(find_io_recursively(child_id, project_data)) + return io_list + + +# --- generate_markdown_tree function (v26 - Final Cleaned Version) --- +def generate_markdown_tree(project_data, md_file_path): + """(v26) Generates final hierarchical Markdown with aesthetic improvements.""" + markdown_lines = ["# Project Hardware & IO Summary (Tree View v26)", ""] + + if not project_data or not project_data.get("plcs"): + markdown_lines.append("*No PLC identified in the project data.*") + try: + with open(md_file_path, "w", encoding="utf-8") as f: + f.write("\n".join(markdown_lines)) + print(f"\nMarkdown summary written to: {md_file_path}") + except Exception as e: + print(f"ERROR writing Markdown file {md_file_path}: {e}") + return + + markdown_lines.append(f"Identified {len(project_data['plcs'])} PLC(s).") + + for plc_id, plc_info in project_data.get("plcs", {}).items(): + markdown_lines.append(f"\n## PLC: {plc_info.get('name', plc_id)}") + type_name = plc_info.get("type_name", "N/A") + order_num = plc_info.get("order_number", "N/A") + firmware = plc_info.get("firmware_version", "N/A") + if type_name and type_name != "N/A": + markdown_lines.append(f"- **Type Name:** `{type_name}`") + if order_num and order_num != "N/A": + markdown_lines.append(f"- **Order Number:** `{order_num}`") + if firmware and firmware != "N/A": + markdown_lines.append(f"- **Firmware:** `{firmware}`") + # ID removed + + plc_networks = plc_info.get("connected_networks", {}) + markdown_lines.append("\n- **Networks:**") + if not plc_networks: + markdown_lines.append( + " - *No 
network connections found associated with this PLC object.*" + ) + else: + sorted_network_items = sorted( + plc_networks.items(), + key=lambda item: project_data.get("networks", {}) + .get(item[0], {}) + .get("name", item[0]), + ) + + for net_id, plc_addr_on_net in sorted_network_items: + net_info = project_data.get("networks", {}).get(net_id) + if not net_info: + markdown_lines.append( + f" - !!! Error: Network info missing for ID {net_id} !!!" + ) + continue + + markdown_lines.append( + f" - ### {net_info.get('name', net_id)} ({net_info.get('type', 'Unknown')})" + ) + markdown_lines.append( + f" - PLC Address on this Net: `{plc_addr_on_net}`" + ) + markdown_lines.append(f" - **Devices on Network:**") + + devices_on_this_net = net_info.get("devices_on_net", {}) + + def sort_key(item): + node_id, node_addr = item + try: + parts = [int(p) for p in re.findall(r"\d+", node_addr)] + return parts + except: + return [float("inf")] + + plc_interface_and_node_ids = set() + for node in plc_info.get("network_nodes", []): + plc_interface_and_node_ids.add(node["id"]) + interface_id = ( + project_data["devices"].get(node["id"], {}).get("parent_id") + ) + if interface_id: + plc_interface_and_node_ids.add(interface_id) + plc_interface_and_node_ids.add(plc_id) + + other_device_items = sorted( + [ + (node_id, node_addr) + for node_id, node_addr in devices_on_this_net.items() + if node_id not in plc_interface_and_node_ids + ], + key=sort_key, + ) + + if not other_device_items: + markdown_lines.append(" - *None (besides PLC interfaces)*") + else: + # --- Display Logic with Sibling IO Aggregation & Aesthetics --- + for node_id, node_addr in other_device_items: + node_info = project_data.get("devices", {}).get(node_id) + if not node_info: + markdown_lines.append( + f" - !!! Error: Node info missing for ID {node_id} Addr: {node_addr} !!!" 
+ ) + continue + + interface_id = node_info.get("parent_id") + interface_info = None + actual_device_id = None + actual_device_info = None + rack_id = None + rack_info = None + if interface_id: + interface_info = project_data.get("devices", {}).get( + interface_id + ) + if interface_info: + actual_device_id = interface_info.get("parent_id") + if actual_device_id: + actual_device_info = project_data.get( + "devices", {} + ).get(actual_device_id) + if actual_device_info: + potential_rack_id = actual_device_info.get( + "parent_id" + ) + if potential_rack_id: + potential_rack_info = project_data.get( + "devices", {} + ).get(potential_rack_id) + if potential_rack_info and ( + "Rack" + in potential_rack_info.get("name", "") + or potential_rack_info.get("position") + is None + ): + rack_id = potential_rack_id + rack_info = potential_rack_info + + display_info_title = ( + actual_device_info + if actual_device_info + else (interface_info if interface_info else node_info) + ) + display_id_title = ( + actual_device_id + if actual_device_info + else (interface_id if interface_info else node_id) + ) + + io_search_root_id = ( + actual_device_id + if actual_device_info + else (interface_id if interface_info else node_id) + ) + io_search_root_info = project_data.get("devices", {}).get( + io_search_root_id + ) + + # Construct Title + display_name = display_info_title.get("name", display_id_title) + via_node_name = node_info.get("name", node_id) + title_str = f"#### {display_name}" + if display_id_title != node_id: + title_str += f" (via {via_node_name} @ `{node_addr}`)" + else: + title_str += f" (@ `{node_addr}`)" + markdown_lines.append(f" - {title_str}") + + # Display Basic Details + markdown_lines.append( + f" - Address (on net): `{node_addr}`" + ) + type_name_disp = display_info_title.get("type_name", "N/A") + order_num_disp = display_info_title.get("order_number", "N/A") + pos_disp = display_info_title.get("position", "N/A") + if type_name_disp and type_name_disp != "N/A": + markdown_lines.append( + f" - Type Name: `{type_name_disp}`" + ) + if order_num_disp and order_num_disp != "N/A": + markdown_lines.append( + f" - Order No: `{order_num_disp}`" + ) + if pos_disp and pos_disp != "N/A": + markdown_lines.append( + f" - Pos (in parent): `{pos_disp}`" + ) + ultimate_parent_id = rack_id + if not ultimate_parent_id and actual_device_info: + ultimate_parent_id = actual_device_info.get("parent_id") + if ( + ultimate_parent_id + and ultimate_parent_id != display_id_title + ): + ultimate_parent_info = project_data.get("devices", {}).get( + ultimate_parent_id + ) + ultimate_parent_name = ( + ultimate_parent_info.get("name", "?") + if ultimate_parent_info + else "?" + ) + markdown_lines.append( + f" - Parent Structure: `{ultimate_parent_name}`" + ) # Removed ID here + + # --- IO Aggregation Logic (from v24) --- + aggregated_io_addresses = [] + parent_structure_id = ( + io_search_root_info.get("parent_id") + if io_search_root_info + else None + ) + io_search_root_name_disp = ( + io_search_root_info.get("name", "?") + if io_search_root_info + else "?" + ) + + if parent_structure_id: + parent_structure_info = project_data.get("devices", {}).get( + parent_structure_id + ) + parent_structure_name = ( + parent_structure_info.get("name", "?") + if parent_structure_info + else "?" 
+ ) + search_title = f"parent '{parent_structure_name}'" + sibling_found_io = False + for dev_scan_id, dev_scan_info in project_data.get( + "devices", {} + ).items(): + if ( + dev_scan_info.get("parent_id") + == parent_structure_id + ): + io_from_sibling = find_io_recursively( + dev_scan_id, project_data + ) + if io_from_sibling: + aggregated_io_addresses.extend(io_from_sibling) + sibling_found_io = True + if ( + not sibling_found_io and not aggregated_io_addresses + ): # Only show message if list still empty + markdown_lines.append( + f" - *No IO Addresses found in modules under {search_title} (ID: {parent_structure_id}).*" + ) + + elif io_search_root_id: + search_title = f"'{io_search_root_name_disp}'" + aggregated_io_addresses = find_io_recursively( + io_search_root_id, project_data + ) + if not aggregated_io_addresses: + markdown_lines.append( + f" - *No IO Addresses found in modules under {search_title} (ID: {io_search_root_id}).*" + ) + else: + markdown_lines.append( + f" - *Could not determine structure to search for IO addresses.*" + ) + # --- End IO Aggregation --- + + # Display aggregated IO Addresses with Siemens format (Cleaned) + if aggregated_io_addresses: + markdown_lines.append( + f" - **IO Addresses (Aggregated from Structure):**" + ) # Removed redundant search root name + sorted_agg_io = sorted( + aggregated_io_addresses, + key=lambda x: ( + ( + int(x.get("module_pos", "9999")) + if x.get("module_pos", "9999").isdigit() + else 9999 + ), + x.get("module_name", ""), + x.get("type", ""), + ( + int(x.get("start", "0")) + if x.get("start", "0").isdigit() + else float("inf") + ), + ), + ) + last_module_id_key = None + for addr_info in sorted_agg_io: + current_module_id_key = ( + addr_info.get("module_name", "?"), + addr_info.get("module_pos", "?"), + ) + if current_module_id_key != last_module_id_key: + markdown_lines.append( + f" - **From Module:** {addr_info.get('module_name','?')} (Pos: {addr_info.get('module_pos','?')})" + ) + last_module_id_key = current_module_id_key + + # --- Siemens IO Formatting (from v25.1 - keep fixes) --- + io_type = addr_info.get("type", "?") + start_str = addr_info.get("start", "?") + length_str = addr_info.get("length", "?") + area_str = addr_info.get("area", "?") + siemens_addr = f"FMT_ERROR" # Default error + length_bits = 0 + try: + start_byte = int(start_str) + length_bits = int(length_str) + length_bytes = math.ceil( + length_bits / 8.0 + ) # Use float division + if length_bits > 0 and length_bytes == 0: + length_bytes = 1 # Handle len < 8 bits + end_byte = start_byte + length_bytes - 1 + prefix = "P?" + if io_type.lower() == "input": + prefix = "PE" + elif io_type.lower() == "output": + prefix = "PA" + siemens_addr = f"{prefix} {start_byte}..{end_byte}" + except Exception: # Catch any error during calc/format + siemens_addr = ( + f"FMT_ERROR({start_str},{length_str})" + ) + + markdown_lines.append( + f" - `{siemens_addr}` (Len={length_bits} bits)" # Simplified output + ) + # --- End Siemens IO Formatting --- + + # IO Connections logic remains the same... 
+ links_from = project_data.get("links_by_source", {}).get( + display_id_title, [] + ) + links_to = project_data.get("links_by_target", {}).get( + display_id_title, [] + ) + io_conns = [] + for link in links_from: + if "channel" in link["source_suffix"].lower(): + target_str = f"{link.get('target_device_name', link['target_id'])}:{link['target_suffix']}" + if link["target_id"] == display_id_title: + target_str = link["target_suffix"] + io_conns.append( + f"`{link['source_suffix']}` → `{target_str}`" + ) + for link in links_to: + if "channel" in link["target_suffix"].lower(): + source_str = f"{link.get('source_device_name', link['source_id'])}:{link['source_suffix']}" + if link["source_id"] == display_id_title: + source_str = link["source_suffix"] + io_conns.append( + f"`{source_str}` → `{link['target_suffix']}`" + ) + if io_conns: + markdown_lines.append( + f" - **IO Connections (Channels):**" + ) + for conn in sorted(list(set(io_conns))): + markdown_lines.append(f" - {conn}") + markdown_lines.append("") # Spacing + # --- *** END Display Logic *** --- + + try: + with open(md_file_path, "w", encoding="utf-8") as f: + f.write("\n".join(markdown_lines)) + print(f"\nMarkdown summary written to: {md_file_path}") + except Exception as e: + print(f"ERROR writing Markdown file {md_file_path}: {e}") + traceback.print_exc() + + +# --- generate_io_upward_tree function (Unchanged from v23) --- +def generate_io_upward_tree(project_data, md_file_path): + """(v23) Generates a debug tree starting from IO addresses upwards.""" + markdown_lines = ["# IO Address Upward Connection Trace (Debug v23)", ""] + if not project_data or not project_data.get("devices"): + markdown_lines.append("*No device data found.*") + try: + with open(md_file_path, "w", encoding="utf-8") as f: + f.write("\n".join(markdown_lines)) + print(f"\nIO upward debug tree written to: {md_file_path}") + except Exception as e: + print(f"ERROR writing IO upward debug tree file {md_file_path}: {e}") + return + node_to_network_map = {} + for net_id, net_info in project_data.get("networks", {}).items(): + net_name = net_info.get("name", "?") + for node_id, node_addr in net_info.get("devices_on_net", {}).items(): + node_to_network_map[node_id] = (net_id, net_name, node_addr) + devices_with_io = [] + for dev_id, dev_info in project_data.get("devices", {}).items(): + if dev_info.get("io_addresses"): + devices_with_io.append((dev_id, dev_info)) + if not devices_with_io: + markdown_lines.append("*No devices with defined IO Addresses found.*") + else: + markdown_lines.append( + f"Found {len(devices_with_io)} device(s)/module(s) with IO addresses. 
Tracing connections upwards:\n" + ) + devices_with_io.sort( + key=lambda item: ( + ( + int(item[1].get("position", "9999")) + if item[1].get("position", "9999").isdigit() + else 9999 + ), + item[1].get("name", ""), + ) + ) + for dev_id, dev_info in devices_with_io: + markdown_lines.append( + f"## IO Module: {dev_info.get('name', dev_id)} (ID: {dev_id})" + ) + markdown_lines.append(f"- Position: {dev_info.get('position', 'N/A')}") + markdown_lines.append("- IO Addresses:") + for addr in sorted( + dev_info["io_addresses"], + key=lambda x: ( + x.get("type", ""), + ( + int(x.get("start", "0")) + if x.get("start", "0").isdigit() + else float("inf") + ), + ), + ): + markdown_lines.append( + f" - `{addr.get('type','?').ljust(6)} Start={addr.get('start','?').ljust(4)} Len={addr.get('length','?').ljust(3)}` (Area: {addr.get('area','?')})" + ) + markdown_lines.append("- Upward Path:") + current_id = dev_id + current_info = dev_info + indent = " " + path_found = False + ancestor_limit = 15 + count = 0 + while current_id and count < ancestor_limit: + ancestor_name = current_info.get("name", "?") if current_info else "?" + ancestor_pos = ( + current_info.get("position", "N/A") if current_info else "N/A" + ) + markdown_lines.append( + f"{indent}└─ {ancestor_name} (ID: {current_id}, Pos: {ancestor_pos})" + ) + if current_id in node_to_network_map: + net_id, net_name, node_addr = node_to_network_map[current_id] + markdown_lines.append(f"{indent} └─ **Network Connection Point**") + markdown_lines.append( + f"{indent} - Node: {ancestor_name} (ID: {current_id})" + ) + markdown_lines.append( + f"{indent} - Network: {net_name} (ID: {net_id})" + ) + markdown_lines.append(f"{indent} - Address: `{node_addr}`") + plc_connection_found = False + for plc_id_check, plc_info_check in project_data.get( + "plcs", {} + ).items(): + if net_id in plc_info_check.get("connected_networks", {}): + markdown_lines.append( + f"{indent} - **Network associated with PLC:** {plc_info_check.get('name','?')} (ID: {plc_id_check})" + ) + plc_connection_found = True + break + if not plc_connection_found: + markdown_lines.append( + f"{indent} - *Network not directly associated with a known PLC in data.*" + ) + path_found = True + break + if current_id in project_data.get("plcs", {}): + markdown_lines.append(f"{indent} └─ **Is PLC:** {ancestor_name}") + path_found = True + break + parent_id = current_info.get("parent_id") if current_info else None + if parent_id: + current_info = project_data.get("devices", {}).get(parent_id) + if not current_info: + markdown_lines.append( + f"{indent} └─ Parent ID {parent_id} not found. Stopping trace." + ) + break + current_id = parent_id + indent += " " + else: + markdown_lines.append( + f"{indent} └─ Reached top level (no parent)." + ) + break + count += 1 + if count >= ancestor_limit: + markdown_lines.append( + f"{indent} └─ Reached ancestor limit. Stopping trace." 
+ ) + if not path_found: + markdown_lines.append( + f"{indent}└─ *Could not trace path to a known Network Node or PLC.*" + ) + markdown_lines.append("") + try: + with open(md_file_path, "w", encoding="utf-8") as f: + f.write("\n".join(markdown_lines)) + print(f"\nIO upward debug tree written to: {md_file_path}") + except Exception as e: + print(f"ERROR writing IO upward debug tree file {md_file_path}: {e}") + + +# --- process_aml_file function (unchanged from v22) --- +def process_aml_file( + aml_file_path, json_output_path, md_output_path, md_upward_output_path +): + # (Unchanged) + print(f"Processing AML file: {aml_file_path}") + if not os.path.exists(aml_file_path): + print(f"ERROR: Input AML file not found at {aml_file_path}") + return + try: + parser = ET.XMLParser(remove_blank_text=True, huge_tree=True) + tree = ET.parse(aml_file_path, parser) + root = tree.getroot() + project_data = extract_aml_data(root) # v15 extraction + print(f"Generating JSON output: {json_output_path}") + try: + with open(json_output_path, "w", encoding="utf-8") as f: + json.dump(project_data, f, indent=4, default=str) + print(f"JSON data written successfully.") + except Exception as e: + print(f"ERROR writing JSON file {json_output_path}: {e}") + traceback.print_exc() + generate_markdown_tree(project_data, md_output_path) # v26 MD generation + generate_io_upward_tree( + project_data, md_upward_output_path + ) # v23 upward generation + except ET.LxmlError as xml_err: + print(f"ERROR parsing XML file {aml_file_path} with lxml: {xml_err}") + traceback.print_exc() + except Exception as e: + print(f"ERROR processing AML file {aml_file_path}: {e}") + traceback.print_exc() + +def select_cax_file(): + """Opens a dialog to select a CAx (XML) export file.""" + root = tk.Tk() + root.withdraw() + file_path = filedialog.askopenfilename( + title="Select CAx Export File (XML)", + filetypes=[("XML Files", "*.xml"), ("All Files", "*.*")] # Changed filetypes + ) + root.destroy() + if not file_path: + print("No CAx file selected. Exiting.") + sys.exit(0) + return file_path + +def select_output_directory(): + """Opens a dialog to select the output directory.""" + root = tk.Tk() + root.withdraw() + dir_path = filedialog.askdirectory( + title="Select Output Directory for JSON and MD files" # Updated title slightly + ) + root.destroy() + if not dir_path: + print("No output directory selected. Exiting.") + sys.exit(0) + return dir_path + +# --- Main Execution --- +if __name__ == "__main__": + # configs = load_configuration() # Keep if needed, otherwise remove + + script_version = "v27 - User Input/Output Paths" # Updated version + print( + f"--- AML (CAx Export) to Hierarchical JSON and Obsidian MD Converter ({script_version}) ---" + ) + + # 1. 
Select Input CAx File and Output Directory
+    cax_file_path = select_cax_file()
+    output_dir = select_output_directory()
+
+    # Convert paths to Path objects
+    input_path = Path(cax_file_path)
+    output_path = Path(output_dir)
+
+    # Check if input file exists
+    if not input_path.is_file():
+        print(f"ERROR: Input file '{input_path}' not found or is not a file.")
+        sys.exit(1)
+
+    # Ensure output directory exists
+    output_path.mkdir(parents=True, exist_ok=True)
+
+    # Construct output file paths within the selected output directory
+    output_json_file = output_path / input_path.with_suffix(".hierarchical.json").name
+    output_md_file = output_path / input_path.with_name(f"{input_path.stem}_Hardware_Tree.md").name
+    output_md_upward_file = output_path / input_path.with_name(f"{input_path.stem}_IO_Upward_Debug.md").name
+
+    print(f"Input AML: {input_path.resolve()}")
+    print(f"Output Directory: {output_path.resolve()}")
+    print(f"Output JSON: {output_json_file.resolve()}")
+    print(f"Output Main Tree MD: {output_md_file.resolve()}")
+    print(f"Output IO Debug Tree MD: {output_md_upward_file.resolve()}")
+
+    # Process the selected file and save outputs to the selected directory
+    process_aml_file(
+        str(input_path),
+        str(output_json_file),
+        str(output_md_file),
+        str(output_md_upward_file),
+    )
+
+    print("\nScript finished.")
\ No newline at end of file
diff --git a/backend/script_utils.py b/backend/script_utils.py
index ad52c41..a73a1c3 100644
--- a/backend/script_utils.py
+++ b/backend/script_utils.py
@@ -1,5 +1,6 @@
 import os
 import json
+import inspect
 from typing import Dict, Any
 
@@ -20,8 +21,11 @@ def load_configuration() -> Dict[str, Any]:
         working_dir = configs.get("working_directory", "")
     """
     try:
-        # Get directory of the calling script
-        script_dir = os.path.dirname(os.path.abspath(__file__))
+        # Get the caller's stack frame
+        caller_frame = inspect.stack()[1]
+        caller_file = caller_frame.filename
+        # Get the directory of the script that called this function
+        script_dir = os.path.dirname(os.path.abspath(caller_file))
 
         # Path to the config file
         config_file_path = os.path.join(script_dir, "script_config.json")
diff --git a/config_manager.py b/config_manager.py
index 4c4a301..0399976 100644
--- a/config_manager.py
+++ b/config_manager.py
@@ -423,11 +423,12 @@ class ConfigurationManager:
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
             text=True,
-            bufsize=1,
+            encoding='utf-8',  # <--- explicitly set UTF-8 decoding
+            errors='replace',  # optional: replace malformed characters instead of failing
+            bufsize=1,  # Line buffered
             env=dict(
                 os.environ,
-                # SCRIPT_CONFIGS=json.dumps(configs),  # Commented out as we now use a file
-                PYTHONIOENCODING="utf-8",
+                PYTHONIOENCODING="utf-8",  # keeping this as well is good
             ),
         )
diff --git a/data/log.txt b/data/log.txt
index b5a0582..f606c05 100644
--- a/data/log.txt
+++ b/data/log.txt
@@ -1,16 +1 @@
-[21:32:28] Configuraciones guardadas en d:\Proyectos\Scripts\ParamManagerScripts\backend\script_groups\example_group\script_config.json
-[21:32:28] Iniciando ejecución de x1.py
-[21:32:33] Configuration file not found: d:\Proyectos\Scripts\ParamManagerScripts\backend\script_config.json
-[21:32:33] === Ejecutando Script de Prueba 1 ===
-[21:32:33] Configuraciones cargadas:
-[21:32:33] Nivel 1: {}
-[21:32:33] Nivel 2: {}
-[21:32:33] Nivel 3: {}
-[21:32:33] Simulando procesamiento...
-[21:32:33] Progreso: 20%
-[21:32:33] Progreso: 40%
-[21:32:33] Progreso: 60%
-[21:32:33] Progreso: 80%
-[21:32:33] Progreso: 100%
-[21:32:33] ¡Proceso completado!
-[21:32:33] Ejecución completada +[22:54:35] Error: Directorio de trabajo no configurado
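--
Notes (illustrative sketches with assumptions flagged; not part of the committed files):

The recursive IO search in x3.py flattens everything underneath a device into a
single list, tagging each address with the owning module's name and slot
position. Below, the helper is reproduced from the patch and applied to
hypothetical device data (the IDs "rack0"/"mod1" and the names are invented for
illustration):

def find_io_recursively(device_id, project_data):
    """Recursively finds all IO addresses under a given device ID."""
    io_list = []
    device_info = project_data.get("devices", {}).get(device_id)
    if not device_info:
        return io_list
    if device_info.get("io_addresses"):
        for addr in device_info["io_addresses"]:
            io_list.append(
                {
                    "module_name": device_info.get("name", device_id),
                    "module_pos": device_info.get("position", "N/A"),
                    **addr,
                }
            )
    children_ids = device_info.get("children_ids", [])
    for child_id in children_ids:
        if child_id != device_id:  # Basic loop prevention
            io_list.extend(find_io_recursively(child_id, project_data))
    return io_list

# Hypothetical two-level structure: a rack holding one 16-bit DI module.
project_data = {
    "devices": {
        "rack0": {"name": "Rail_0", "children_ids": ["mod1"]},
        "mod1": {
            "name": "DI_16",
            "position": "2",
            "children_ids": [],
            "io_addresses": [
                {"type": "Input", "start": "0", "length": "16", "area": "Input"}
            ],
        },
    }
}

# The rack itself carries no IO, so only the module's address comes back,
# tagged with the module name and slot position:
assert find_io_recursively("rack0", project_data) == [
    {
        "module_name": "DI_16",
        "module_pos": "2",
        "type": "Input",
        "start": "0",
        "length": "16",
        "area": "Input",
    }
]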
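The Siemens-style formatting in generate_markdown_tree turns a start byte plus
a length in bits into a peripheral-input (PE) or peripheral-output (PA) byte
range. The same arithmetic, distilled into a standalone function (the function
name is mine, not from the patch):

import math

def siemens_io_range(io_type, start_str, length_str):
    """Byte range 'PE a..b' / 'PA a..b' from a start byte and a length in bits."""
    start_byte = int(start_str)
    length_bits = int(length_str)
    length_bytes = math.ceil(length_bits / 8.0)  # float division, as in the patch
    if length_bits > 0 and length_bytes == 0:
        length_bytes = 1  # defensive guard kept from the patch
    end_byte = start_byte + length_bytes - 1
    prefix = (
        "PE" if io_type.lower() == "input"
        else "PA" if io_type.lower() == "output"
        else "P?"
    )
    return f"{prefix} {start_byte}..{end_byte}"

# A 16-bit input area starting at byte 10 occupies bytes 10 and 11:
assert siemens_io_range("Input", "10", "16") == "PE 10..11"
# A 4-bit output area still rounds up to one full byte:
assert siemens_io_range("Output", "2", "4") == "PA 2..2"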
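The backend/script_utils.py change matters because os.path.abspath(__file__)
inside load_configuration always resolves to script_utils.py itself, so every
script group read backend/script_config.json (the "Configuration file not
found: ...backend\script_config.json" line in the removed log shows the
symptom). inspect.stack()[1] points at the direct caller instead. The idea in
isolation:

import inspect
import os

def caller_directory():
    """Directory of whichever script called this function (not of this file)."""
    caller_file = inspect.stack()[1].filename  # frame 0 is this function itself
    return os.path.dirname(os.path.abspath(caller_file))

# Called from backend/script_groups/ObtainIOFromProjectTia/x1.py, this returns
# .../backend/script_groups/ObtainIOFromProjectTia, so script_config.json is
# looked up next to the calling script.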
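The config_manager.py change pins down how the child process's output is
decoded. With text=True and no explicit encoding, Popen falls back to the
locale's preferred encoding (commonly cp1252 on Windows), which mangles or
rejects UTF-8 output such as the accented log lines above. A sketch of the
resulting call; the child script name is hypothetical, and stderr is merged
here only to keep the read loop simple (the patch keeps separate pipes):

import os
import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, "some_script.py"],  # hypothetical child script
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    text=True,
    encoding="utf-8",   # decode child output as UTF-8 regardless of the locale default
    errors="replace",   # degrade malformed bytes to U+FFFD instead of raising
    bufsize=1,          # line buffered, so lines can stream to the log as they appear
    env=dict(os.environ, PYTHONIOENCODING="utf-8"),  # ask the child to emit UTF-8 too
)
for line in proc.stdout:
    print(line, end="")
proc.wait()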