Add XML block header parser and SCL header addition script
- Implemented `parse_block_header_from_xml` to extract block information from TIA Portal XML files.
- Created the `_extract_common_attributes` helper to retrieve common attributes from block nodes.
- Added `generate_block_header_comment` to format the header comment based on the extracted block information.
- Introduced `get_block_header_comment_from_xml` as a convenience for generating header comments directly from XML files.
- Developed `add_header_to_file` in a new test script to read SCL files, check for existing headers, and prepend a header generated from the corresponding XML file.
- Included error handling and logging for better debugging and user feedback.
commit affab8a646 (parent 5da864abe0)
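For orientation, a minimal usage sketch of the helpers described above. The import path follows the `ToUpload/parsers` layout added in this commit; the `prepend_block_header` wrapper and the file paths are hypothetical, shown only to illustrate the intended flow.

```python
from parsers.parse_block_header import get_block_header_comment_from_xml

def prepend_block_header(scl_path: str, xml_path: str) -> bool:
    """Prepend the block identification comment (e.g. "// FC1032") if the SCL file lacks one."""
    header = get_block_header_comment_from_xml(xml_path)  # e.g. "// FC1032", "// DB57", or None
    if header is None:
        return False
    with open(scl_path, "r", encoding="utf-8") as f:
        content = f.read()
    if content.lstrip().startswith(header):
        return False  # header already present, leave the file as-is
    with open(scl_path, "w", encoding="utf-8") as f:
        f.write(header + "\n" + content)
    return True

# Hypothetical paths, for illustration only
prepend_block_header("scl_output/1032_FC Manual function.scl",
                     "ProgramBlocks_XML/1032_FC Manual function.xml")
```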
@@ -0,0 +1,117 @@
FUNCTION "1032_FC Manual function" : Void
{ S7_Optimized_Access := 'TRUE' }
VERSION : 0.1
VAR_TEMP
    wPosition : Word;
    xrtCurrentLimit : Bool;
    g : Int;
    p : Int;
    m : Int;
    b : Int;
END_VAR


BEGIN

    #g := "DB HMI_1".nGWNumber;
    #p := "DB HMI_1".nPortNumber;
    #b := "DB HMI_1".nBoxNumber;
    #m := "DB HMI_1".nMotorNumber;

    "rtMotInPos"(CLK:="DB HMI_1".xMotStatInPos );

    IF "DB Cycle".Man THEN

        // Run Forward
        IF "DB HMI_1".xPBMotMoveFw AND
           "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].xEnable THEN
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Sign := TRUE;
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].PosType := FALSE;
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Position := "DB HMI_1".nPosition;
        END_IF;

        // Run Backward
        IF "DB HMI_1".xPBMotMoveBw AND
           "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].xEnable THEN
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Sign := FALSE;
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].PosType := FALSE;
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Position := "DB HMI_1".nPosition;
        END_IF;

        // Run Zero/Position
        IF "DB HMI_1".xPBMotMoveZeroPos AND
           "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].xEnable THEN
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].PosType := TRUE;
            "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Position := "DB HMI_1".nPosition;
        END_IF;

        // Stop
        IF NOT "DB Cycle".xZona_MoveManFw AND NOT "DB Cycle".xZona_MoveManBw THEN
            IF "DB HMI_1".xPBMotStop OR "rtMotInPos".Q OR "DB HMI_1".xMotStatAlarm THEN
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Position := 0;
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].PosType := FALSE;
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Reset := FALSE;
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Sign := FALSE;
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Stop := FALSE;
            END_IF;
            IF ("DB HMI_1".xPBMotMoveFw OR "DB HMI_1".xPBMotMoveBw OR "DB HMI_1".xPBMotMoveZeroPos) AND
               "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].xEnable THEN
                "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Stop := TRUE;
            END_IF;
        END_IF;

    END_IF;

    // Alarm Reset
    IF "DB HMI_1".xPBMotAlarmReset AND "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].Alarm THEN
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Reset := TRUE;
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Stop := TRUE;
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Sign := FALSE;
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].PosType := FALSE;
    END_IF;

    "rtPB_MotAlmReset"(CLK:= NOT "DB HMI_1".xPBMotAlarmReset);

    IF "rtPB_MotAlmReset".Q THEN
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Reset := FALSE;
        "DB Gateway".N[#g].write.P[#p].MotorBoxCtrl[#b].Mot[#m].Stop := FALSE;
    END_IF;


    // Motor Current limitation
    "rtPB_CurrentLimit"(CLK:="DB HMI_1".xPopUpCurrentLim,
                        Q=>#xrtCurrentLimit);
    IF "rtPB_CurrentLimit".Q THEN
        "DB HMI_1".snCurrentLimFW := "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].CurrentLimFW;
        "DB HMI_1".snCurrentLimBW := "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].CurrentLimBW;
    END_IF;
    IF "DB HMI_1".xPopUpCurrentLim AND NOT "rtPB_CurrentLimit".Q THEN
        "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].CurrentLimFW := "DB HMI_1".snCurrentLimFW;
        "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].CurrentLimBW := "DB HMI_1".snCurrentLimBW;
    END_IF;
    IF NOT "DB HMI_1".xPopUpCurrentLim THEN
        "DB HMI_1".snCurrentLimFW := 0;
        "DB HMI_1".snCurrentLimBW := 0;
    END_IF;

    // HMI Motor Status
    "DB HMI_1".snGatewayFirmware := "DB Gateway".N[#g].read.P[#p].Firmware;
    "DB HMI_1".nBoxInstalled := USINT_TO_INT ("DB Gateway".N[#g].read.P[#p].MboxNumber);
    "DB HMI_1".snBoxFirmwareVersion := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].BoxStatus.FirmwareVersion;
    "DB HMI_1".snBoxFirmwareRevision := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].BoxStatus.FirmwareRevision;
    "DB HMI_1".xBox_MotEnabled := "DB MotorPar".GW[#g].P[#p].Box[#b].Mot[#m].xEnable;
    "DB HMI_1".xMotStatRunningFw := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].MovingFW;
    "DB HMI_1".xMotStatRunningBw := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].MovingBW;
    "DB HMI_1".xMotStatInPos := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].InPOS;
    "DB HMI_1".xMotStatAlarm := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].Alarm;
    "DB HMI_1".xMotStatInZeroPos := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].InZero;
    "DB HMI_1".xMotStatRunningSlowly := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].MotorStatus[#m].MovingSlowly;
    "DB HMI_1".xBoxFuseBurned := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].BoxStatus.BurnedFuse;
    "DB HMI_1".xBoxUndervoltage := "DB Gateway".N[#g].read.P[#p].MotorsBoxStatus[#b].BoxStatus.Undervoltage;
    IF ("DB HMI_1".xMotStatRunningFw OR "DB HMI_1".xMotStatRunningBw) AND "DB Cycle".Man THEN
        "DB HMI_1".xMotStatRunning := 1;
    ELSE
        "DB HMI_1".xMotStatRunning := 0;
    END_IF;
END_FUNCTION
File diff suppressed because it is too large
@@ -5,7 +5,7 @@
            "path": "."
        },
        {
            "path": "C:/Trabajo/SIDEL/13 - E5.007560 - Modifica O&U - SAE235/Reporte/ExportTia"
            "path": "../../../../../../Trabajo/VM/45 - HENKEL - VM Auto Changeover/ExportTia/PLC_TL25_Q1"
        }
    ],
    "settings": {
@@ -1,11 +1,23 @@
# generators/generate_md_tag_table.py
# -*- coding: utf-8 -*-


def generate_tag_table_markdown(data):
    """Genera contenido Markdown para una tabla de tags."""
    md_lines = []
    table_name = data.get("block_name", "UnknownTagTable")
    tags = data.get("tags", [])
    block_number = data.get("block_number")
    block_type = data.get("block_type", "TagTable")

    # Agregar línea de identificación del bloque al inicio
    if block_number and block_type:
        if block_type == "PlcTagTable" or block_type == "TagTable":
            md_lines.append(f"<!-- TAG{block_number} -->")
        else:
            md_lines.append(f"<!-- {block_type}{block_number} -->")
    elif block_type:
        md_lines.append(f"<!-- {block_type} -->")

    md_lines.append(f"# Tag Table: {table_name}")
    md_lines.append("")
@@ -18,11 +30,13 @@ def generate_tag_table_markdown(data):
            datatype = tag.get("datatype", "N/A")
            address = tag.get("address", "N/A") or " "
            comment_raw = tag.get("comment")
            comment = comment_raw.replace('|', '\|').replace('\n', ' ') if comment_raw else ""
            comment = (
                comment_raw.replace("|", "\|").replace("\n", " ") if comment_raw else ""
            )
            md_lines.append(f"| `{name}` | `{datatype}` | `{address}` | {comment} |")
        md_lines.append("")
    else:
        md_lines.append("No tags found in this table.")
        md_lines.append("")

    return md_lines
    return md_lines
@@ -1,46 +1,90 @@
# generators/generate_md_udt.py
# -*- coding: utf-8 -*-
import re
from .generator_utils import format_scl_start_value # Importar utilidad necesaria
from .generator_utils import format_scl_start_value  # Importar utilidad necesaria


def generate_markdown_member_rows(members, level=0):
    """Genera filas Markdown para miembros de UDT (recursivo)."""
    md_rows = []; prefix = " " * level
    md_rows = []
    prefix = " " * level
    for member in members:
        name = member.get("name", "N/A"); datatype = member.get("datatype", "N/A")
        name = member.get("name", "N/A")
        datatype = member.get("datatype", "N/A")
        start_value_raw = member.get("start_value")
        start_value_fmt = format_scl_start_value(start_value_raw, datatype) if start_value_raw is not None else ""
        comment_raw = member.get("comment"); comment = comment_raw.replace('|', '\|').replace('\n', ' ') if comment_raw else ""
        md_rows.append(f"| {prefix}`{name}` | `{datatype}` | `{start_value_fmt}` | {comment} |")
        start_value_fmt = (
            format_scl_start_value(start_value_raw, datatype)
            if start_value_raw is not None
            else ""
        )
        comment_raw = member.get("comment")
        comment = (
            comment_raw.replace("|", "\|").replace("\n", " ") if comment_raw else ""
        )
        md_rows.append(
            f"| {prefix}`{name}` | `{datatype}` | `{start_value_fmt}` | {comment} |"
        )
        children = member.get("children")
        if children: md_rows.extend(generate_markdown_member_rows(children, level + 1))
        if children:
            md_rows.extend(generate_markdown_member_rows(children, level + 1))
        array_elements = member.get("array_elements")
        if array_elements:
            base_type_for_init = datatype
            if isinstance(datatype, str) and datatype.lower().startswith("array["):
                match = re.match(r"(Array\[.*\]\s+of\s+)(.*)", datatype, re.IGNORECASE)
                if match: base_type_for_init = match.group(2).strip()
            md_rows.append(f"| {prefix} *(Initial Values)* | | | |")
            try:
                indices_numeric = {int(k): v for k, v in array_elements.items()}
                sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())]
            except ValueError: sorted_indices_str = sorted(array_elements.keys())
            for idx_str in sorted_indices_str:
                val_raw = array_elements[idx_str]
                val_fmt = format_scl_start_value(val_raw, base_type_for_init) if val_raw is not None else ""
                md_rows.append(f"| {prefix} `[{idx_str}]` | | `{val_fmt}` | |")
            base_type_for_init = datatype
            if isinstance(datatype, str) and datatype.lower().startswith("array["):
                match = re.match(r"(Array\[.*\]\s+of\s+)(.*)", datatype, re.IGNORECASE)
                if match:
                    base_type_for_init = match.group(2).strip()
            md_rows.append(f"| {prefix} *(Initial Values)* | | | |")
            try:
                indices_numeric = {int(k): v for k, v in array_elements.items()}
                sorted_indices_str = [str(k) for k in sorted(indices_numeric.keys())]
            except ValueError:
                sorted_indices_str = sorted(array_elements.keys())
            for idx_str in sorted_indices_str:
                val_raw = array_elements[idx_str]
                val_fmt = (
                    format_scl_start_value(val_raw, base_type_for_init)
                    if val_raw is not None
                    else ""
                )
                md_rows.append(
                    f"| {prefix} `[{idx_str}]` | | `{val_fmt}` | |"
                )
    return md_rows


def generate_udt_markdown(data):
    """Genera contenido Markdown para un UDT."""
    md_lines = []; udt_name = data.get("block_name", "UnknownUDT"); udt_comment = data.get("block_comment", "")
    md_lines.append(f"# UDT: {udt_name}"); md_lines.append("")
    if udt_comment: md_lines.append(f"**Comment:**"); [md_lines.append(f"> {line}") for line in udt_comment.splitlines()]; md_lines.append("")
    md_lines = []
    udt_name = data.get("block_name", "UnknownUDT")
    udt_comment = data.get("block_comment", "")
    block_number = data.get("block_number")
    block_type = data.get("block_type", "UDT")

    # Agregar línea de identificación del bloque al inicio
    if block_number and block_type:
        if block_type == "PlcUDT" or block_type == "UDT":
            md_lines.append(f"<!-- UDT{block_number} -->")
        else:
            md_lines.append(f"<!-- {block_type}{block_number} -->")
    elif block_type:
        md_lines.append(f"<!-- {block_type} -->")

    md_lines.append(f"# UDT: {udt_name}")
    md_lines.append("")
    if udt_comment:
        md_lines.append(f"**Comment:**")
        [md_lines.append(f"> {line}") for line in udt_comment.splitlines()]
        md_lines.append("")
    members = data.get("interface", {}).get("None", [])
    if members:
        md_lines.append("## Members"); md_lines.append("")
        md_lines.append("| Name | Datatype | Start Value | Comment |"); md_lines.append("|---|---|---|---|")
        md_lines.append("## Members")
        md_lines.append("")
        md_lines.append("| Name | Datatype | Start Value | Comment |")
        md_lines.append("|---|---|---|---|")
        md_lines.extend(generate_markdown_member_rows(members))
        md_lines.append("")
    else: md_lines.append("No members found in the UDT interface."); md_lines.append("")
    return md_lines
    else:
        md_lines.append("No members found in the UDT interface.")
        md_lines.append("")
    return md_lines
@@ -7,7 +7,6 @@ from .generator_utils import format_variable_name, generate_scl_declarations
SCL_SUFFIX = "_sympy_processed"


# ... (_generate_scl_header sin cambios)...
def _generate_scl_header(data, scl_block_name):
    scl_output = []
    block_type = data.get("block_type", "Unknown")
@@ -19,6 +18,20 @@ def _generate_scl_header(data, scl_block_name):
        scl_block_keyword = "FUNCTION"
    elif block_type == "OB":
        scl_block_keyword = "ORGANIZATION_BLOCK"

    # Agregar línea de identificación del bloque al inicio
    if block_number and block_type:
        if block_type == "FB":
            scl_output.append(f"// FB{block_number}")
        elif block_type == "FC":
            scl_output.append(f"// FC{block_number}")
        elif block_type == "OB":
            scl_output.append(f"// OB{block_number}")
        else:
            scl_output.append(f"// {block_type}{block_number}")
    elif block_type:
        scl_output.append(f"// {block_type}")

    scl_output.append(f"// Block Type: {block_type}")
    if block_name != scl_block_name:
        scl_output.append(f"// Block Name (Original): {block_name}")
@@ -188,7 +201,9 @@ def _generate_scl_body(networks):
        scl_output.append(f" // --- BEGIN STL Network {i+1} ---")
        scl_output.append(f" ```stl ")
        [
            scl_output.append(f" {stl_line}") # scl_output.append(f" // {stl_line}")
            scl_output.append(
                f" {stl_line}"
            ) # scl_output.append(f" // {stl_line}")
            for stl_line in raw_stl_code.splitlines()
        ]
        scl_output.append(f" ``` ")
@@ -3,6 +3,7 @@
# No necesita importar json/os aquí, lo hará generate_scl_declarations
from .generator_utils import format_variable_name, generate_scl_declarations


# Modificar _generate_scl_header si es necesario, pero parece ok
def _generate_scl_header(data, scl_block_name):
    # ... (código sin cambios) ...
@@ -11,32 +12,60 @@ def _generate_scl_header(data, scl_block_name):
    block_name = data.get("block_name", "UnknownBlock")
    block_number = data.get("block_number")
    block_comment = data.get("block_comment", "")

    # Agregar línea de identificación del bloque al inicio
    if block_number and block_type:
        if block_type == "GlobalDB":
            scl_output.append(f"// DB{block_number}")
        elif block_type == "InstanceDB":
            scl_output.append(f"// DB{block_number}")
        else:
            # Para otros tipos de DB
            scl_output.append(f"// DB{block_number}")
    elif block_type:
        scl_output.append(f"// {block_type}")

    scl_output.append(f"// Block Type: {block_type}")
    if block_name != scl_block_name: scl_output.append(f"// Block Name (Original): {block_name}")
    if block_number: scl_output.append(f"// Block Number: {block_number}")
    if block_comment: scl_output.append(f"// Block Comment:"); [scl_output.append(f"// {line}") for line in block_comment.splitlines()]
    scl_output.append(""); scl_output.append(f'DATA_BLOCK "{scl_block_name}"'); scl_output.append("{ S7_Optimized_Access := 'TRUE' }")
    scl_output.append("VERSION : 0.1"); scl_output.append("")
    if block_name != scl_block_name:
        scl_output.append(f"// Block Name (Original): {block_name}")
    if block_number:
        scl_output.append(f"// Block Number: {block_number}")
    if block_comment:
        scl_output.append(f"// Block Comment:")
        [scl_output.append(f"// {line}") for line in block_comment.splitlines()]
    scl_output.append("")
    scl_output.append(f'DATA_BLOCK "{scl_block_name}"')
    scl_output.append("{ S7_Optimized_Access := 'TRUE' }")
    scl_output.append("VERSION : 0.1")
    scl_output.append("")
    return scl_output


# Modificar _generate_scl_interface para pasar project_root_dir
def _generate_scl_interface(interface_data, project_root_dir): # <-- Nuevo argumento
def _generate_scl_interface(interface_data, project_root_dir):  # <-- Nuevo argumento
    """Genera la sección VAR para DB (basada en 'Static')."""
    scl_output = []
    static_vars = interface_data.get("Static", [])
    if static_vars:
        scl_output.append("VAR")
        # Pasar project_root_dir a generate_scl_declarations
        scl_output.extend(generate_scl_declarations(static_vars, indent_level=1, project_root_dir=project_root_dir)) # <-- Pasar ruta raíz
        scl_output.extend(
            generate_scl_declarations(
                static_vars, indent_level=1, project_root_dir=project_root_dir
            )
        ) # <-- Pasar ruta raíz
        scl_output.append("END_VAR")
    else:
        print("Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB.")
        scl_output.append("VAR\nEND_VAR") # Añadir vacío
        print(
            "Advertencia: No se encontró sección 'Static' o está vacía en la interfaz del DB."
        )
        scl_output.append("VAR\nEND_VAR") # Añadir vacío
    scl_output.append("")
    return scl_output


# Modificar generate_scl_for_db para aceptar y pasar project_root_dir
def generate_scl_for_db(data, project_root_dir): # <-- Nuevo argumento
def generate_scl_for_db(data, project_root_dir):  # <-- Nuevo argumento
    """Genera el contenido SCL completo para un DATA_BLOCK."""
    scl_output = []
    scl_block_name = format_variable_name(data.get("block_name", "UnknownDB"))
@@ -45,10 +74,12 @@ def generate_scl_for_db(data, project_root_dir): # <-- Nuevo argumento

    interface_data = data.get("interface", {})
    # Pasar project_root_dir a _generate_scl_interface
    scl_output.extend(_generate_scl_interface(interface_data, project_root_dir)) # <-- Pasar ruta raíz
    scl_output.extend(
        _generate_scl_interface(interface_data, project_root_dir)
    ) # <-- Pasar ruta raíz

    scl_output.append("BEGIN")
    scl_output.append(" // Data Blocks have no executable code")
    scl_output.append("END_DATA_BLOCK")

    return scl_output
    return scl_output
@@ -0,0 +1,191 @@
# ToUpload/parsers/parse_block_header.py
# -*- coding: utf-8 -*-
from lxml import etree
import os

# Importar desde las utilidades del parser
from .parser_utils import ns, get_multilingual_text


def parse_block_header_from_xml(xml_filepath):
    """
    Extrae información del header del bloque desde un archivo XML de TIA Portal.

    Args:
        xml_filepath (str): Ruta al archivo XML

    Returns:
        dict: Diccionario con información del bloque:
        {
            'block_type': 'FC' | 'FB' | 'DB' | 'UDT' | 'PlcTagTable',
            'block_number': str | None,
            'block_name': str | None,
            'programming_language': str | None
        }
    """
    if not os.path.exists(xml_filepath):
        return None

    try:
        tree = etree.parse(xml_filepath)
        root = tree.getroot()

        # Buscar diferentes tipos de bloques
        block_info = {
            "block_type": None,
            "block_number": None,
            "block_name": None,
            "programming_language": None,
        }

        # 1. Function (FC)
        fc_node = root.find(".//SW.Blocks.FC")
        if fc_node is not None:
            block_info["block_type"] = "FC"
            block_info.update(_extract_common_attributes(fc_node))
            return block_info

        # 2. Function Block (FB)
        fb_node = root.find(".//SW.Blocks.FB")
        if fb_node is not None:
            block_info["block_type"] = "FB"
            block_info.update(_extract_common_attributes(fb_node))
            return block_info

        # 3. Organization Block (OB)
        ob_node = root.find(".//SW.Blocks.OB")
        if ob_node is not None:
            block_info["block_type"] = "OB"
            block_info.update(_extract_common_attributes(ob_node))
            return block_info

        # 4. Data Block (DB) - Global
        db_node = root.find(".//SW.Blocks.GlobalDB")
        if db_node is not None:
            block_info["block_type"] = "GlobalDB"
            block_info.update(_extract_common_attributes(db_node))
            return block_info

        # 5. Data Block (DB) - Instance
        idb_node = root.find(".//SW.Blocks.InstanceDB")
        if idb_node is not None:
            block_info["block_type"] = "InstanceDB"
            block_info.update(_extract_common_attributes(idb_node))
            return block_info

        # 6. User Defined Type (UDT)
        udt_node = root.find(".//SW.Types.PlcStruct")
        if udt_node is not None:
            block_info["block_type"] = "PlcUDT"
            block_info.update(_extract_common_attributes(udt_node))
            return block_info

        # 7. Tag Table
        tag_table_node = root.find(".//SW.Tags.PlcTagTable")
        if tag_table_node is not None:
            block_info["block_type"] = "PlcTagTable"
            block_info.update(_extract_common_attributes(tag_table_node))
            return block_info

        return None

    except Exception as e:
        print(f"Error parsing block header from {xml_filepath}: {e}")
        return None


def _extract_common_attributes(block_node):
    """
    Extrae atributos comunes de un nodo de bloque.

    Args:
        block_node: Nodo XML del bloque

    Returns:
        dict: Diccionario con atributos extraídos
    """
    attributes = {}

    # Buscar AttributeList
    attr_list = block_node.find("AttributeList")
    if attr_list is not None:
        # Nombre del bloque
        name_elem = attr_list.find("Name")
        if name_elem is not None:
            attributes["block_name"] = name_elem.text

        # Número del bloque
        number_elem = attr_list.find("Number")
        if number_elem is not None:
            attributes["block_number"] = str(number_elem.text)

        # Lenguaje de programación
        lang_elem = attr_list.find("ProgrammingLanguage")
        if lang_elem is not None:
            attributes["programming_language"] = lang_elem.text

    return attributes


def generate_block_header_comment(block_info):
    """
    Genera el comentario de header del bloque basado en la información extraída.

    Args:
        block_info (dict): Información del bloque extraída del XML

    Returns:
        str: Línea de comentario del header (ej: "// FC1032")
    """
    if not block_info or not block_info.get("block_type"):
        return None

    block_type = block_info["block_type"]
    block_number = block_info.get("block_number")

    # Mapear tipos de bloque a abreviaciones
    type_mapping = {
        "FC": "FC",
        "FB": "FB",
        "OB": "OB",
        "GlobalDB": "DB",
        "InstanceDB": "DB",
        "PlcUDT": "UDT",
        "PlcTagTable": "TAG",
    }

    abbreviated_type = type_mapping.get(block_type, block_type)

    if block_number:
        return f"// {abbreviated_type}{block_number}"
    else:
        return f"// {abbreviated_type}"


# Función de conveniencia para uso directo
def get_block_header_comment_from_xml(xml_filepath):
    """
    Función de conveniencia que extrae la información del bloque y genera el comentario de header.

    Args:
        xml_filepath (str): Ruta al archivo XML

    Returns:
        str | None: Comentario de header (ej: "// FC1032") o None si no se pudo extraer
    """
    block_info = parse_block_header_from_xml(xml_filepath)
    if block_info:
        return generate_block_header_comment(block_info)
    return None


if __name__ == "__main__":
    # Ejemplo de uso para testing
    import sys

    if len(sys.argv) > 1:
        xml_file = sys.argv[1]
        header = get_block_header_comment_from_xml(xml_file)
        print(f"Header for {xml_file}: {header}")
    else:
        print("Usage: python parse_block_header.py <xml_file>")
@@ -15,5 +15,5 @@
        "xref_source_subdir": "source"
    },
    "level3": {},
    "working_directory": "C:\\Trabajo\\SIDEL\\09 - SAE452 - Diet as Regular - San Giorgio in Bosco\\Reporte\\TiaExport"
    "working_directory": "D:\\Trabajo\\VM\\45 - HENKEL - VM Auto Changeover\\ExportTia"
}
@@ -64,5 +64,11 @@
        "short_description": "Sin descripción corta.",
        "long_description": "",
        "hidden": false
    },
    "test_parser.py": {
        "display_name": "test_parser",
        "short_description": "Sin descripción corta.",
        "long_description": "",
        "hidden": false
    }
}
@@ -1,47 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script de prueba para verificar que los índices de arrays se capturen correctamente en LAD/FBD."""
import os
import sys

# Añadir el directorio padre al path para los imports
sys.path.insert(0, os.path.dirname(__file__))

from x1_to_json import convert_xml_to_json

if __name__ == "__main__":
    xml_file = ".example/FC TT Devices.xml"
    json_file = ".example/FC_TT_Devices_test.json"

    print(f"Probando conversión de {xml_file} a {json_file}...")

    try:
        success = convert_xml_to_json(xml_file, json_file)
        if success:
            print("Conversión exitosa!")

            # Buscar patrones de arrays en el JSON generado
            with open(json_file, "r", encoding="utf-8") as f:
                content = f.read()

            # Buscar di0.x con índices
            if '"di0.x"[1]' in content:
                print(
                    "✅ ÉXITO: Se encontró di0.x[1] - los índices de arrays se están capturando correctamente!"
                )
            elif '"di0.x"[]' in content:
                print("❌ PROBLEMA: Se encontró di0.x[] - los índices están vacíos")
            elif '"di0.x"' in content:
                print(
                    "❌ PROBLEMA: Se encontró di0.x sin índices - el fix no está funcionando"
                )
            else:
                print("⚠️ No se encontró di0.x en el contenido")

        else:
            print("Error en la conversión")
    except Exception as e:
        print(f"Error: {e}")
        import traceback

        traceback.print_exc()
@@ -1,7 +1,7 @@
{
    "path": "C:\\Trabajo\\SIDEL\\09 - SAE452 - Diet as Regular - San Giorgio in Bosco\\Reporte\\TiaExport",
    "path": "D:\\Trabajo\\VM\\45 - HENKEL - VM Auto Changeover\\ExportTia",
    "history": [
        "C:\\Trabajo\\SIDEL\\09 - SAE452 - Diet as Regular - San Giorgio in Bosco\\Reporte\\TiaExport",
        "D:\\Trabajo\\VM\\45 - HENKEL - VM Auto Changeover\\ExportTia"
        "D:\\Trabajo\\VM\\45 - HENKEL - VM Auto Changeover\\ExportTia",
        "C:\\Trabajo\\SIDEL\\09 - SAE452 - Diet as Regular - San Giorgio in Bosco\\Reporte\\TiaExport"
    ]
}
@@ -18,6 +18,7 @@ import traceback
import json
import datetime # <-- NUEVO: Para timestamps
import shutil # <-- ADDED: Import shutil for file copying
import re # <-- ADDED: Import regex for block header processing

script_root = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@@ -162,6 +163,217 @@ def check_skip_status(
    return status


# --- FUNCIÓN AUXILIAR PARA AGREGAR HEADER A SCL COPIADOS ---
def add_block_header_to_scl(src_path, dest_path, log_f):
    """
    Copia un archivo SCL agregando la línea de identificación del bloque al inicio.
    Extrae el tipo y número de bloque del XML correspondiente si existe.
    """
    try:
        log_message(
            f" Procesando archivo SCL: {os.path.basename(src_path)}",
            log_f,
            also_print=False,
        )

        # Leer el archivo SCL original
        with open(src_path, "r", encoding="utf-8") as f:
            content = f.read()

        # Verificar si ya tiene header correcto en la primera línea
        lines = content.split("\n")
        if lines and lines[0].strip():
            first_line = lines[0].strip()
            if re.match(r"^//\s*(FB|FC|DB|UDT|TAG|OB)\d+\s*$", first_line):
                # Ya tiene el header correcto, no necesitamos agregarlo
                log_message(
                    f" ✓ Archivo ya tiene header de bloque: {first_line}",
                    log_f,
                    also_print=False,
                )
                shutil.copy2(src_path, dest_path)
                return True

        # Intentar encontrar el XML correspondiente
        xml_path = src_path.replace(".scl", ".xml")
        header_comment = None

        # Si no existe en la misma carpeta, buscar en estructura paralela ProgramBlocks_XML
        if not os.path.exists(xml_path):
            # Intentar convertir ruta de ProgramBlocks_SCL a ProgramBlocks_XML
            if "ProgramBlocks_SCL" in src_path:
                xml_path = src_path.replace(
                    "ProgramBlocks_SCL", "ProgramBlocks_XML"
                ).replace(".scl", ".xml")
            # O viceversa, si está en otra estructura
            elif "scl_output" in src_path:
                # Para archivos ya copiados en scl_output, buscar en ProgramBlocks_XML
                # Extraer el nombre base y buscar recursivamente en el proyecto
                base_name = os.path.splitext(os.path.basename(src_path))[0]
                project_root = src_path
                # Subir hasta encontrar la raíz del proyecto (donde están las carpetas ProgramBlocks_*)
                while project_root and not any(
                    os.path.exists(os.path.join(project_root, d))
                    for d in ["ProgramBlocks_XML", "ProgramBlocks_SCL"]
                ):
                    parent = os.path.dirname(project_root)
                    if parent == project_root: # Llegamos a la raíz del sistema
                        break
                    project_root = parent

                if project_root:
                    # Buscar el XML correspondiente recursivamente
                    xml_search_pattern = os.path.join(
                        project_root, "**", f"{base_name}.xml"
                    )
                    import glob

                    xml_candidates = glob.glob(xml_search_pattern, recursive=True)
                    if xml_candidates:
                        xml_path = xml_candidates[0] # Tomar el primero encontrado

        log_message(f" Buscando XML en: {xml_path}", log_f, also_print=False)

        if os.path.exists(xml_path):
            # Usar el nuevo parser para extraer información del XML
            try:
                from parsers.parse_block_header import get_block_header_comment_from_xml

                header_comment = get_block_header_comment_from_xml(xml_path)
                if header_comment:
                    log_message(
                        f" Extraído header del XML: {header_comment}",
                        log_f,
                        also_print=False,
                    )
            except Exception as e:
                log_message(
                    f" Error extrayendo header del XML {xml_path}: {e}",
                    log_f,
                    also_print=False,
                )
        else:
            log_message(
                f" XML no encontrado en: {xml_path}", log_f, also_print=False
            )

        # Si no se pudo extraer del XML, intentar extraer del contenido SCL (fallback)
        if not header_comment:
            log_message(
                f" XML no encontrado o sin header válido, intentando extraer del contenido SCL",
                log_f,
                also_print=False,
            )
            header_comment = _extract_header_from_scl_content(content, log_f)

        # Escribir el archivo con el header
        with open(dest_path, "w", encoding="utf-8") as f:
            if header_comment:
                f.write(header_comment + "\n")
            f.write(content)

        if header_comment:
            log_message(
                f" ✓ Agregado header: {header_comment}", log_f, also_print=False
            )
        else:
            log_message(
                f" ⚠ No se pudo determinar tipo/número de bloque, copiando sin header",
                log_f,
                also_print=False,
            )

        return True
    except Exception as e:
        log_message(f" ✗ ERROR procesando archivo SCL: {e}", log_f)
        # Fallback: copia simple
        try:
            shutil.copy2(src_path, dest_path)
            log_message(
                f" ⚠ Fallback: copia simple realizada", log_f, also_print=False
            )
            return True
        except Exception as e2:
            log_message(f" ✗ ERROR en fallback de copia: {e2}", log_f)
            return False


def _extract_header_from_scl_content(content, log_f):
    """
    Función auxiliar para extraer header del contenido SCL como fallback.
    """
    block_type = None
    block_number = None

    # Buscar primero en comentarios ya existentes
    lines = content.split("\n")
    for line in lines[:15]: # Buscar en las primeras 15 líneas
        line_clean = line.strip()
        if line_clean.startswith("//"):
            # Buscar patrones como "// Block Number: 1051"
            if "Block Number:" in line_clean:
                match = re.search(r"Block Number:\s*(\d+)", line_clean)
                if match:
                    block_number = match.group(1)
            elif "Block Type:" in line_clean:
                if "GlobalDB" in line_clean or "InstanceDB" in line_clean:
                    block_type = "DB"
                elif "FB" in line_clean:
                    block_type = "FB"
                elif "FC" in line_clean:
                    block_type = "FC"
                elif "UDT" in line_clean or "PlcUDT" in line_clean:
                    block_type = "UDT"
                elif "PlcTagTable" in line_clean or "TagTable" in line_clean:
                    block_type = "TAG"

    # Si no se encontró en comentarios, buscar en declaraciones de bloques
    if not block_type or not block_number:
        for line in lines:
            line_clean = line.strip()

            # Buscar declaraciones de bloques
            if "FUNCTION_BLOCK" in line_clean and '"' in line_clean:
                block_type = "FB"
                match = re.search(r"FB[_]?(\d+)", line_clean, re.IGNORECASE)
                if match:
                    block_number = match.group(1)
                break
            elif (
                "FUNCTION" in line_clean
                and '"' in line_clean
                and "FUNCTION_BLOCK" not in line_clean
            ):
                block_type = "FC"
                match = re.search(r"FC[_]?(\d+)", line_clean, re.IGNORECASE)
                if match:
                    block_number = match.group(1)
                break
            elif "DATA_BLOCK" in line_clean and '"' in line_clean:
                block_type = "DB"
                match = re.search(r"DB[_]?(\d+)", line_clean, re.IGNORECASE)
                if match:
                    block_number = match.group(1)
                break
            elif "TYPE" in line_clean and '"' in line_clean:
                block_type = "UDT"
                match = re.search(r"UDT[_]?(\d+)", line_clean, re.IGNORECASE)
                if match:
                    block_number = match.group(1)
                break

    # Construir la línea de header
    if block_type and block_number:
        return f"// {block_type}{block_number}"
    elif block_type:
        return f"// {block_type}"

    return None


# --- FIN FUNCIÓN AUXILIAR ---


# --- FUNCIÓN DE LIMPIEZA (x7) ---------------------------------------------------------------------------
@@ -807,22 +1019,59 @@ if __name__ == "__main__":
        # Check if a file with the same name was already generated from XML
        if os.path.exists(dest_scl_path):
            log_message(
                f" - Omitiendo copia de '{relative_scl_path}': Ya existe un archivo generado con el mismo nombre en el destino.",
                f" - Sobreescribiendo archivo existente: '{relative_scl_path}' (agregando cabecera si es necesario)",
                log_f,
                also_print=False,
                also_print=True,
            )
            skipped_scl_count += 1
            # En lugar de omitir, vamos a procesarlo para agregar la cabecera
            try:
                log_message(
                    f" - Procesando '{relative_scl_path}' para verificar/agregar cabecera",
                    log_f,
                    also_print=True,
                )
                # Usar la función auxiliar que agrega el header del bloque
                success = add_block_header_to_scl(
                    src_scl_path, dest_scl_path, log_f
                )
                if success:
                    copied_scl_count += 1
                    log_message(
                        f" ✓ Procesado exitosamente",
                        log_f,
                        also_print=True,
                    )
                else:
                    log_message(
                        f" - ERROR procesando '{relative_scl_path}'", log_f
                    )
            except Exception as copy_err:
                log_message(
                    f" - ERROR procesando '{relative_scl_path}': {copy_err}",
                    log_f,
                )
        else:
            try:
                log_message(
                    f" - Copiando '{relative_scl_path}' a '{os.path.relpath(dest_scl_path, working_directory)}'",
                    log_f,
                    also_print=False,
                    also_print=True, # Cambiado a True para ver en consola
                )
                shutil.copy2(
                    src_scl_path, dest_scl_path
                ) # copy2 preserves metadata
                copied_scl_count += 1
                # Usar la función auxiliar que agrega el header del bloque
                success = add_block_header_to_scl(
                    src_scl_path, dest_scl_path, log_f
                )
                if success:
                    copied_scl_count += 1
                    log_message(
                        f" ✓ Copiado exitosamente",
                        log_f,
                        also_print=True,
                    )
                else:
                    log_message(
                        f" - ERROR procesando '{relative_scl_path}'", log_f
                    )
            except Exception as copy_err:
                log_message(
                    f" - ERROR copiando '{relative_scl_path}': {copy_err}",
data/log.txt (21098 lines): File diff suppressed because it is too large