With autosize and auto-centering

Miguel 2024-04-23 09:52:24 +02:00
parent b287417c22
commit f47ea30871
15 changed files with 6308 additions and 197 deletions

View File

@@ -1,33 +1,7 @@
import xmltodict
import pandas as pd
import re
import sys
def save_json_to_xml(json_data, filename="DB_Structure.xml"):
"""
Convert JSON data to XML and save it to a file.
"""
xml_data = xmltodict.unparse({"root": json_data}, pretty=True)
with open(filename, "w") as xml_file:
xml_file.write(xml_data)
print(f"XML data saved to {filename}")
def save_dataframe_to_excel(df, filename="DB_Structure.xlsx"):
"""
Save the provided DataFrame to an Excel file.
"""
df.to_excel(filename, index=False)
print(f"Data saved to {filename}")
def save_dataframe_to_file(df, filename="DB_Structure.csv"):
"""
Save the provided DataFrame to a CSV file.
"""
df.to_csv(filename, index=False)
print(f"Data saved to {filename}")
type_sizes = {
"Byte": 1,
@@ -225,19 +199,19 @@ def collect_data_for_table(
return collected_data
def convert_to_table(db_struct):
def initiate_conversion_to_table(db_struct):
offset_state = OffsetState()
return collect_data_for_table(db_struct, offset_state)
def display_as_table(dbs):
def convert_to_table(dbs):
"""
Convert collected DB data into a pandas DataFrame and display it.
"""
all_data = []
for db_name, db_content in dbs.items():
print(f"Processing DB: {db_name}")
db_data = convert_to_table(db_content)
db_data = initiate_conversion_to_table(db_content)
all_data.extend(db_data)
df = pd.DataFrame(all_data)

View File

@@ -0,0 +1,178 @@
Nombre,Tipo,Offset,Size,Level,Dirección PLC,Comentario
MAIN,"""UDT SIPA SV Main""",0,-1,0,DBX0.0,
MAIN.N1,DInt,0,4,1,DBD0,.DB_IOT.USERLEVEL
MAIN.N2,String[81],4,83,1,DBX4.0,.DB_IOT.USERNAME
MAIN.N3,String[81],88,83,1,DBX88.0,.DB_IOT.NOME_RICETTA
MAIN.N4,Int,172,2,1,DBW172,.DB_IOT.NEXT_MAINT_CYCLES
MAIN.N5.V1,String[254],174,256,2,DBX174.0,
MAIN.N5.V2,String[254],430,256,2,DBX430.0,
MAIN.N5.V3,String[254],686,256,2,DBX686.0,
MAIN.N5.V4,String[254],942,256,2,DBX942.0,
MAIN.N5.V5,String[8],1198,10,2,DBX1198.0,
MAIN.N6,Array[1..3] of Real,1208,0,1,DBX1208.0,.DB_IOT.ELECTRIC_VOLTAGE_PHASE_D
MAIN.N6[1],Real,1208,4,2,DBD1208,.DB_IOT.ELECTRIC_VOLTAGE_PHASE_D
MAIN.N6[2],Real,1212,4,2,DBD1212,.DB_IOT.ELECTRIC_VOLTAGE_PHASE_D
MAIN.N6[3],Real,1216,4,2,DBD1216,.DB_IOT.ELECTRIC_VOLTAGE_PHASE_D
MAIN.N7,Array[1..3] of Real,1220,0,1,DBX1220.0,.DB_IOT.ELECTRIC_CURRENT_PHASE_D
MAIN.N7[1],Real,1220,4,2,DBD1220,.DB_IOT.ELECTRIC_CURRENT_PHASE_D
MAIN.N7[2],Real,1224,4,2,DBD1224,.DB_IOT.ELECTRIC_CURRENT_PHASE_D
MAIN.N7[3],Real,1228,4,2,DBD1228,.DB_IOT.ELECTRIC_CURRENT_PHASE_D
MAIN.N8,Real,1232,4,1,DBD1232,.DB_IOT.ELECTRIC_POWER_D
MAIN.N9,Real,1236,4,1,DBD1236,.DB_IOT.ELECTRIC_POWER_FACTOR_D
MAIN.N10,Real,1240,4,1,DBD1240,.DB_IOT.ELECTRIC_POWER_HOUR_D
MAIN.N11,Real,1244,4,1,DBD1244,.DB_IOT.ELECTRIC_POWER_WH
SECT1,"""UDT SIPA SV Section""",1248,-1,0,DBX1248.0,
SECT1.N1,DInt,1248,4,1,DBD1248,.DB_IOT.STATO_MACCHINA
SECT1.N2,DInt,1252,4,1,DBD1252,.DB_IOT.ALLARME_FERMO
SECT1.N3,DInt,1256,4,1,DBD1256,.DB_IOT.WARNING_ATTIVO (che compromette produzione)
SECT1.N4,Int,1260,2,1,DBW1260,.DB_IOT.STATO_OPERATIVO (Semaforo)
SECT1.N5,Int,1262,2,1,DBW1262,".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
SECT1.N6,DInt,1264,4,1,DBD1264,.DB_IOT.ALARM_STOP_NO
SECT1.N7.V1,DInt,1268,4,2,DBD1268,PIECES_TOT
SECT1.N7.V2,DInt,1272,4,2,DBD1272,PIECES_OK
SECT1.N7.V3,DInt,1276,4,2,DBD1276,PIECES_KO_1
SECT1.N7.V4,DInt,1280,4,2,DBD1280,PIECES_KO_2
SECT1.N7.V5,DInt,1284,4,2,DBD1284,PIECES_KO_3
SECT1.N7.V6,DInt,1288,4,2,DBD1288,PIECES_KO_4
SECT1.N7.V7,DInt,1292,4,2,DBD1292,PIECES_KO_5
SECT1.N7.V8,DInt,1296,4,2,DBD1296,PIECES_KO_6
SECT1.N7.V9,DInt,1300,4,2,DBD1300,PIECES_KO_7
SECT1.N7.V10,DInt,1304,4,2,DBD1304,PIECES_KO_8
SECT1.N7.V11,DInt,1308,4,2,DBD1308,PIECES_KO_9
SECT1.N7.V12,DInt,1312,4,2,DBD1312,PIECES_KO_10
SECT1.N7.V13,DInt,1316,4,2,DBD1316,T_ALARM_HOURS
SECT1.N7.V14,DInt,1320,4,2,DBD1320,T_DRY_CYCLE_HOURS
SECT1.N7.V15,DInt,1324,4,2,DBD1324,T_POWERED_HOURS
SECT1.N7.V16,DInt,1328,4,2,DBD1328,T_PRODUCT_100_HOURS
SECT1.N7.V17,DInt,1332,4,2,DBD1332,T_PRODUCT_0_HOURS
SECT1.N7.V18,DInt,1336,4,2,DBD1336,T_STOP_HOURS
SECT1.N7.V19,Int,1340,2,2,DBW1340,T_ALARM_MINUTES
SECT1.N7.V20,Int,1342,2,2,DBW1342,T_DRY_CYCLE_MINUTES
SECT1.N7.V21,Int,1344,2,2,DBW1344,T_POWERED_MINUTES
SECT1.N7.V22,Int,1346,2,2,DBW1346,T_PRODUCT_100_MINUTES
SECT1.N7.V23,Int,1348,2,2,DBW1348,T_PRODUCT_0_MINUTES
SECT1.N7.V24,Int,1350,2,2,DBW1350,T_STOP_MINUTES
SECT2,"""UDT SIPA SV Section""",1352,-1,0,DBX1352.0,
SECT2.N1,DInt,1352,4,1,DBD1352,.DB_IOT.STATO_MACCHINA
SECT2.N2,DInt,1356,4,1,DBD1356,.DB_IOT.ALLARME_FERMO
SECT2.N3,DInt,1360,4,1,DBD1360,.DB_IOT.WARNING_ATTIVO (che compromette produzione)
SECT2.N4,Int,1364,2,1,DBW1364,.DB_IOT.STATO_OPERATIVO (Semaforo)
SECT2.N5,Int,1366,2,1,DBW1366,".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
SECT2.N6,DInt,1368,4,1,DBD1368,.DB_IOT.ALARM_STOP_NO
SECT2.N7.V1,DInt,1372,4,2,DBD1372,PIECES_TOT
SECT2.N7.V2,DInt,1376,4,2,DBD1376,PIECES_OK
SECT2.N7.V3,DInt,1380,4,2,DBD1380,PIECES_KO_1
SECT2.N7.V4,DInt,1384,4,2,DBD1384,PIECES_KO_2
SECT2.N7.V5,DInt,1388,4,2,DBD1388,PIECES_KO_3
SECT2.N7.V6,DInt,1392,4,2,DBD1392,PIECES_KO_4
SECT2.N7.V7,DInt,1396,4,2,DBD1396,PIECES_KO_5
SECT2.N7.V8,DInt,1400,4,2,DBD1400,PIECES_KO_6
SECT2.N7.V9,DInt,1404,4,2,DBD1404,PIECES_KO_7
SECT2.N7.V10,DInt,1408,4,2,DBD1408,PIECES_KO_8
SECT2.N7.V11,DInt,1412,4,2,DBD1412,PIECES_KO_9
SECT2.N7.V12,DInt,1416,4,2,DBD1416,PIECES_KO_10
SECT2.N7.V13,DInt,1420,4,2,DBD1420,T_ALARM_HOURS
SECT2.N7.V14,DInt,1424,4,2,DBD1424,T_DRY_CYCLE_HOURS
SECT2.N7.V15,DInt,1428,4,2,DBD1428,T_POWERED_HOURS
SECT2.N7.V16,DInt,1432,4,2,DBD1432,T_PRODUCT_100_HOURS
SECT2.N7.V17,DInt,1436,4,2,DBD1436,T_PRODUCT_0_HOURS
SECT2.N7.V18,DInt,1440,4,2,DBD1440,T_STOP_HOURS
SECT2.N7.V19,Int,1444,2,2,DBW1444,T_ALARM_MINUTES
SECT2.N7.V20,Int,1446,2,2,DBW1446,T_DRY_CYCLE_MINUTES
SECT2.N7.V21,Int,1448,2,2,DBW1448,T_POWERED_MINUTES
SECT2.N7.V22,Int,1450,2,2,DBW1450,T_PRODUCT_100_MINUTES
SECT2.N7.V23,Int,1452,2,2,DBW1452,T_PRODUCT_0_MINUTES
SECT2.N7.V24,Int,1454,2,2,DBW1454,T_STOP_MINUTES
SECT3,"""UDT SIPA SV Section""",1456,-1,0,DBX1456.0,
SECT3.N1,DInt,1456,4,1,DBD1456,.DB_IOT.STATO_MACCHINA
SECT3.N2,DInt,1460,4,1,DBD1460,.DB_IOT.ALLARME_FERMO
SECT3.N3,DInt,1464,4,1,DBD1464,.DB_IOT.WARNING_ATTIVO (che compromette produzione)
SECT3.N4,Int,1468,2,1,DBW1468,.DB_IOT.STATO_OPERATIVO (Semaforo)
SECT3.N5,Int,1470,2,1,DBW1470,".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
SECT3.N6,DInt,1472,4,1,DBD1472,.DB_IOT.ALARM_STOP_NO
SECT3.N7.V1,DInt,1476,4,2,DBD1476,PIECES_TOT
SECT3.N7.V2,DInt,1480,4,2,DBD1480,PIECES_OK
SECT3.N7.V3,DInt,1484,4,2,DBD1484,PIECES_KO_1
SECT3.N7.V4,DInt,1488,4,2,DBD1488,PIECES_KO_2
SECT3.N7.V5,DInt,1492,4,2,DBD1492,PIECES_KO_3
SECT3.N7.V6,DInt,1496,4,2,DBD1496,PIECES_KO_4
SECT3.N7.V7,DInt,1500,4,2,DBD1500,PIECES_KO_5
SECT3.N7.V8,DInt,1504,4,2,DBD1504,PIECES_KO_6
SECT3.N7.V9,DInt,1508,4,2,DBD1508,PIECES_KO_7
SECT3.N7.V10,DInt,1512,4,2,DBD1512,PIECES_KO_8
SECT3.N7.V11,DInt,1516,4,2,DBD1516,PIECES_KO_9
SECT3.N7.V12,DInt,1520,4,2,DBD1520,PIECES_KO_10
SECT3.N7.V13,DInt,1524,4,2,DBD1524,T_ALARM_HOURS
SECT3.N7.V14,DInt,1528,4,2,DBD1528,T_DRY_CYCLE_HOURS
SECT3.N7.V15,DInt,1532,4,2,DBD1532,T_POWERED_HOURS
SECT3.N7.V16,DInt,1536,4,2,DBD1536,T_PRODUCT_100_HOURS
SECT3.N7.V17,DInt,1540,4,2,DBD1540,T_PRODUCT_0_HOURS
SECT3.N7.V18,DInt,1544,4,2,DBD1544,T_STOP_HOURS
SECT3.N7.V19,Int,1548,2,2,DBW1548,T_ALARM_MINUTES
SECT3.N7.V20,Int,1550,2,2,DBW1550,T_DRY_CYCLE_MINUTES
SECT3.N7.V21,Int,1552,2,2,DBW1552,T_POWERED_MINUTES
SECT3.N7.V22,Int,1554,2,2,DBW1554,T_PRODUCT_100_MINUTES
SECT3.N7.V23,Int,1556,2,2,DBW1556,T_PRODUCT_0_MINUTES
SECT3.N7.V24,Int,1558,2,2,DBW1558,T_STOP_MINUTES
SECT4,"""UDT SIPA SV Section""",1560,-1,0,DBX1560.0,
SECT4.N1,DInt,1560,4,1,DBD1560,.DB_IOT.STATO_MACCHINA
SECT4.N2,DInt,1564,4,1,DBD1564,.DB_IOT.ALLARME_FERMO
SECT4.N3,DInt,1568,4,1,DBD1568,.DB_IOT.WARNING_ATTIVO (che compromette produzione)
SECT4.N4,Int,1572,2,1,DBW1572,.DB_IOT.STATO_OPERATIVO (Semaforo)
SECT4.N5,Int,1574,2,1,DBW1574,".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
SECT4.N6,DInt,1576,4,1,DBD1576,.DB_IOT.ALARM_STOP_NO
SECT4.N7.V1,DInt,1580,4,2,DBD1580,PIECES_TOT
SECT4.N7.V2,DInt,1584,4,2,DBD1584,PIECES_OK
SECT4.N7.V3,DInt,1588,4,2,DBD1588,PIECES_KO_1
SECT4.N7.V4,DInt,1592,4,2,DBD1592,PIECES_KO_2
SECT4.N7.V5,DInt,1596,4,2,DBD1596,PIECES_KO_3
SECT4.N7.V6,DInt,1600,4,2,DBD1600,PIECES_KO_4
SECT4.N7.V7,DInt,1604,4,2,DBD1604,PIECES_KO_5
SECT4.N7.V8,DInt,1608,4,2,DBD1608,PIECES_KO_6
SECT4.N7.V9,DInt,1612,4,2,DBD1612,PIECES_KO_7
SECT4.N7.V10,DInt,1616,4,2,DBD1616,PIECES_KO_8
SECT4.N7.V11,DInt,1620,4,2,DBD1620,PIECES_KO_9
SECT4.N7.V12,DInt,1624,4,2,DBD1624,PIECES_KO_10
SECT4.N7.V13,DInt,1628,4,2,DBD1628,T_ALARM_HOURS
SECT4.N7.V14,DInt,1632,4,2,DBD1632,T_DRY_CYCLE_HOURS
SECT4.N7.V15,DInt,1636,4,2,DBD1636,T_POWERED_HOURS
SECT4.N7.V16,DInt,1640,4,2,DBD1640,T_PRODUCT_100_HOURS
SECT4.N7.V17,DInt,1644,4,2,DBD1644,T_PRODUCT_0_HOURS
SECT4.N7.V18,DInt,1648,4,2,DBD1648,T_STOP_HOURS
SECT4.N7.V19,Int,1652,2,2,DBW1652,T_ALARM_MINUTES
SECT4.N7.V20,Int,1654,2,2,DBW1654,T_DRY_CYCLE_MINUTES
SECT4.N7.V21,Int,1656,2,2,DBW1656,T_POWERED_MINUTES
SECT4.N7.V22,Int,1658,2,2,DBW1658,T_PRODUCT_100_MINUTES
SECT4.N7.V23,Int,1660,2,2,DBW1660,T_PRODUCT_0_MINUTES
SECT4.N7.V24,Int,1662,2,2,DBW1662,T_STOP_MINUTES
SECT5,"""UDT SIPA SV Section""",1664,-1,0,DBX1664.0,
SECT5.N1,DInt,1664,4,1,DBD1664,.DB_IOT.STATO_MACCHINA
SECT5.N2,DInt,1668,4,1,DBD1668,.DB_IOT.ALLARME_FERMO
SECT5.N3,DInt,1672,4,1,DBD1672,.DB_IOT.WARNING_ATTIVO (che compromette produzione)
SECT5.N4,Int,1676,2,1,DBW1676,.DB_IOT.STATO_OPERATIVO (Semaforo)
SECT5.N5,Int,1678,2,1,DBW1678,".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
SECT5.N6,DInt,1680,4,1,DBD1680,.DB_IOT.ALARM_STOP_NO
SECT5.N7.V1,DInt,1684,4,2,DBD1684,PIECES_TOT
SECT5.N7.V2,DInt,1688,4,2,DBD1688,PIECES_OK
SECT5.N7.V3,DInt,1692,4,2,DBD1692,PIECES_KO_1
SECT5.N7.V4,DInt,1696,4,2,DBD1696,PIECES_KO_2
SECT5.N7.V5,DInt,1700,4,2,DBD1700,PIECES_KO_3
SECT5.N7.V6,DInt,1704,4,2,DBD1704,PIECES_KO_4
SECT5.N7.V7,DInt,1708,4,2,DBD1708,PIECES_KO_5
SECT5.N7.V8,DInt,1712,4,2,DBD1712,PIECES_KO_6
SECT5.N7.V9,DInt,1716,4,2,DBD1716,PIECES_KO_7
SECT5.N7.V10,DInt,1720,4,2,DBD1720,PIECES_KO_8
SECT5.N7.V11,DInt,1724,4,2,DBD1724,PIECES_KO_9
SECT5.N7.V12,DInt,1728,4,2,DBD1728,PIECES_KO_10
SECT5.N7.V13,DInt,1732,4,2,DBD1732,T_ALARM_HOURS
SECT5.N7.V14,DInt,1736,4,2,DBD1736,T_DRY_CYCLE_HOURS
SECT5.N7.V15,DInt,1740,4,2,DBD1740,T_POWERED_HOURS
SECT5.N7.V16,DInt,1744,4,2,DBD1744,T_PRODUCT_100_HOURS
SECT5.N7.V17,DInt,1748,4,2,DBD1748,T_PRODUCT_0_HOURS
SECT5.N7.V18,DInt,1752,4,2,DBD1752,T_STOP_HOURS
SECT5.N7.V19,Int,1756,2,2,DBW1756,T_ALARM_MINUTES
SECT5.N7.V20,Int,1758,2,2,DBW1758,T_DRY_CYCLE_MINUTES
SECT5.N7.V21,Int,1760,2,2,DBW1760,T_POWERED_MINUTES
SECT5.N7.V22,Int,1762,2,2,DBW1762,T_PRODUCT_100_MINUTES
SECT5.N7.V23,Int,1764,2,2,DBW1764,T_PRODUCT_0_MINUTES
SECT5.N7.V24,Int,1766,2,2,DBW1766,T_STOP_MINUTES
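
The Offset, Size and "Dirección PLC" columns above follow the Siemens S7 absolute-addressing pattern: 1-byte elements map to DBB, 2-byte elements to DBW, 4-byte elements to DBD, single bits (size 0.1) to DBX<byte>.<bit>, and strings, arrays and UDT heads to the bit address of their first byte. A minimal sketch of that mapping (a hypothetical helper, not part of this commit):

def plc_address(byte_offset: int, size: float, bit: int = 0) -> str:
    """Rebuild a 'Dirección PLC' style address from a byte offset and element size."""
    if size == 0.1:                       # a Bool occupies a single bit
        return f"DBX{byte_offset}.{bit}"
    if size == 1:                         # Byte
        return f"DBB{byte_offset}"
    if size == 2:                         # Int, Word, S5Time, ...
        return f"DBW{byte_offset}"
    if size == 4:                         # DInt, Real, Time, ...
        return f"DBD{byte_offset}"
    return f"DBX{byte_offset}.0"          # strings, arrays, UDT heads

# plc_address(172, 2) == "DBW172" (MAIN.N4); plc_address(1208, 4) == "DBD1208" (MAIN.N6[1])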

View File

@@ -0,0 +1,768 @@
{
"DB SIPA Supervision": {
"Struct": {
"MAIN": {
"type": "\"UDT SIPA SV Main\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.USERLEVEL"
},
"N2": {
"type": "String[81]",
"comment": ".DB_IOT.USERNAME"
},
"N3": {
"type": "String[81]",
"comment": ".DB_IOT.NOME_RICETTA"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.NEXT_MAINT_CYCLES"
},
"N5": {
"V1": {
"type": "String[254]",
"comment": ""
},
"V2": {
"type": "String[254]",
"comment": ""
},
"V3": {
"type": "String[254]",
"comment": ""
},
"V4": {
"type": "String[254]",
"comment": ""
},
"V5": {
"type": "String[8]",
"comment": ""
}
},
"N6": {
"type": "Array[1..3] of Real",
"comment": ".DB_IOT.ELECTRIC_VOLTAGE_PHASE_D",
"array_definition": true,
"Array": {
"[1]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_VOLTAGE_PHASE_D",
"is_array_element": true
},
"[2]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_VOLTAGE_PHASE_D",
"is_array_element": true
},
"[3]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_VOLTAGE_PHASE_D",
"is_array_element": true
}
}
},
"N7": {
"type": "Array[1..3] of Real",
"comment": ".DB_IOT.ELECTRIC_CURRENT_PHASE_D",
"array_definition": true,
"Array": {
"[1]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_CURRENT_PHASE_D",
"is_array_element": true
},
"[2]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_CURRENT_PHASE_D",
"is_array_element": true
},
"[3]": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_CURRENT_PHASE_D",
"is_array_element": true
}
}
},
"N8": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_POWER_D"
},
"N9": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_POWER_FACTOR_D"
},
"N10": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_POWER_HOUR_D"
},
"N11": {
"type": "Real",
"comment": ".DB_IOT.ELECTRIC_POWER_WH"
}
}
}
},
"SECT1": {
"type": "\"UDT SIPA SV Section\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.STATO_MACCHINA"
},
"N2": {
"type": "DInt",
"comment": ".DB_IOT.ALLARME_FERMO"
},
"N3": {
"type": "DInt",
"comment": ".DB_IOT.WARNING_ATTIVO (che compromette produzione)"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.STATO_OPERATIVO (Semaforo)"
},
"N5": {
"type": "Int",
"comment": ".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
},
"N6": {
"type": "DInt",
"comment": ".DB_IOT.ALARM_STOP_NO"
},
"N7": {
"V1": {
"type": "DInt",
"comment": "PIECES_TOT"
},
"V2": {
"type": "DInt",
"comment": "PIECES_OK"
},
"V3": {
"type": "DInt",
"comment": "PIECES_KO_1"
},
"V4": {
"type": "DInt",
"comment": "PIECES_KO_2"
},
"V5": {
"type": "DInt",
"comment": "PIECES_KO_3"
},
"V6": {
"type": "DInt",
"comment": "PIECES_KO_4"
},
"V7": {
"type": "DInt",
"comment": "PIECES_KO_5"
},
"V8": {
"type": "DInt",
"comment": "PIECES_KO_6"
},
"V9": {
"type": "DInt",
"comment": "PIECES_KO_7"
},
"V10": {
"type": "DInt",
"comment": "PIECES_KO_8"
},
"V11": {
"type": "DInt",
"comment": "PIECES_KO_9"
},
"V12": {
"type": "DInt",
"comment": "PIECES_KO_10"
},
"V13": {
"type": "DInt",
"comment": "T_ALARM_HOURS"
},
"V14": {
"type": "DInt",
"comment": "T_DRY_CYCLE_HOURS"
},
"V15": {
"type": "DInt",
"comment": "T_POWERED_HOURS"
},
"V16": {
"type": "DInt",
"comment": "T_PRODUCT_100_HOURS"
},
"V17": {
"type": "DInt",
"comment": "T_PRODUCT_0_HOURS"
},
"V18": {
"type": "DInt",
"comment": "T_STOP_HOURS"
},
"V19": {
"type": "Int",
"comment": "T_ALARM_MINUTES"
},
"V20": {
"type": "Int",
"comment": "T_DRY_CYCLE_MINUTES"
},
"V21": {
"type": "Int",
"comment": "T_POWERED_MINUTES"
},
"V22": {
"type": "Int",
"comment": "T_PRODUCT_100_MINUTES"
},
"V23": {
"type": "Int",
"comment": "T_PRODUCT_0_MINUTES"
},
"V24": {
"type": "Int",
"comment": "T_STOP_MINUTES"
}
}
}
}
},
"SECT2": {
"type": "\"UDT SIPA SV Section\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.STATO_MACCHINA"
},
"N2": {
"type": "DInt",
"comment": ".DB_IOT.ALLARME_FERMO"
},
"N3": {
"type": "DInt",
"comment": ".DB_IOT.WARNING_ATTIVO (che compromette produzione)"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.STATO_OPERATIVO (Semaforo)"
},
"N5": {
"type": "Int",
"comment": ".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
},
"N6": {
"type": "DInt",
"comment": ".DB_IOT.ALARM_STOP_NO"
},
"N7": {
"V1": {
"type": "DInt",
"comment": "PIECES_TOT"
},
"V2": {
"type": "DInt",
"comment": "PIECES_OK"
},
"V3": {
"type": "DInt",
"comment": "PIECES_KO_1"
},
"V4": {
"type": "DInt",
"comment": "PIECES_KO_2"
},
"V5": {
"type": "DInt",
"comment": "PIECES_KO_3"
},
"V6": {
"type": "DInt",
"comment": "PIECES_KO_4"
},
"V7": {
"type": "DInt",
"comment": "PIECES_KO_5"
},
"V8": {
"type": "DInt",
"comment": "PIECES_KO_6"
},
"V9": {
"type": "DInt",
"comment": "PIECES_KO_7"
},
"V10": {
"type": "DInt",
"comment": "PIECES_KO_8"
},
"V11": {
"type": "DInt",
"comment": "PIECES_KO_9"
},
"V12": {
"type": "DInt",
"comment": "PIECES_KO_10"
},
"V13": {
"type": "DInt",
"comment": "T_ALARM_HOURS"
},
"V14": {
"type": "DInt",
"comment": "T_DRY_CYCLE_HOURS"
},
"V15": {
"type": "DInt",
"comment": "T_POWERED_HOURS"
},
"V16": {
"type": "DInt",
"comment": "T_PRODUCT_100_HOURS"
},
"V17": {
"type": "DInt",
"comment": "T_PRODUCT_0_HOURS"
},
"V18": {
"type": "DInt",
"comment": "T_STOP_HOURS"
},
"V19": {
"type": "Int",
"comment": "T_ALARM_MINUTES"
},
"V20": {
"type": "Int",
"comment": "T_DRY_CYCLE_MINUTES"
},
"V21": {
"type": "Int",
"comment": "T_POWERED_MINUTES"
},
"V22": {
"type": "Int",
"comment": "T_PRODUCT_100_MINUTES"
},
"V23": {
"type": "Int",
"comment": "T_PRODUCT_0_MINUTES"
},
"V24": {
"type": "Int",
"comment": "T_STOP_MINUTES"
}
}
}
}
},
"SECT3": {
"type": "\"UDT SIPA SV Section\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.STATO_MACCHINA"
},
"N2": {
"type": "DInt",
"comment": ".DB_IOT.ALLARME_FERMO"
},
"N3": {
"type": "DInt",
"comment": ".DB_IOT.WARNING_ATTIVO (che compromette produzione)"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.STATO_OPERATIVO (Semaforo)"
},
"N5": {
"type": "Int",
"comment": ".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
},
"N6": {
"type": "DInt",
"comment": ".DB_IOT.ALARM_STOP_NO"
},
"N7": {
"V1": {
"type": "DInt",
"comment": "PIECES_TOT"
},
"V2": {
"type": "DInt",
"comment": "PIECES_OK"
},
"V3": {
"type": "DInt",
"comment": "PIECES_KO_1"
},
"V4": {
"type": "DInt",
"comment": "PIECES_KO_2"
},
"V5": {
"type": "DInt",
"comment": "PIECES_KO_3"
},
"V6": {
"type": "DInt",
"comment": "PIECES_KO_4"
},
"V7": {
"type": "DInt",
"comment": "PIECES_KO_5"
},
"V8": {
"type": "DInt",
"comment": "PIECES_KO_6"
},
"V9": {
"type": "DInt",
"comment": "PIECES_KO_7"
},
"V10": {
"type": "DInt",
"comment": "PIECES_KO_8"
},
"V11": {
"type": "DInt",
"comment": "PIECES_KO_9"
},
"V12": {
"type": "DInt",
"comment": "PIECES_KO_10"
},
"V13": {
"type": "DInt",
"comment": "T_ALARM_HOURS"
},
"V14": {
"type": "DInt",
"comment": "T_DRY_CYCLE_HOURS"
},
"V15": {
"type": "DInt",
"comment": "T_POWERED_HOURS"
},
"V16": {
"type": "DInt",
"comment": "T_PRODUCT_100_HOURS"
},
"V17": {
"type": "DInt",
"comment": "T_PRODUCT_0_HOURS"
},
"V18": {
"type": "DInt",
"comment": "T_STOP_HOURS"
},
"V19": {
"type": "Int",
"comment": "T_ALARM_MINUTES"
},
"V20": {
"type": "Int",
"comment": "T_DRY_CYCLE_MINUTES"
},
"V21": {
"type": "Int",
"comment": "T_POWERED_MINUTES"
},
"V22": {
"type": "Int",
"comment": "T_PRODUCT_100_MINUTES"
},
"V23": {
"type": "Int",
"comment": "T_PRODUCT_0_MINUTES"
},
"V24": {
"type": "Int",
"comment": "T_STOP_MINUTES"
}
}
}
}
},
"SECT4": {
"type": "\"UDT SIPA SV Section\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.STATO_MACCHINA"
},
"N2": {
"type": "DInt",
"comment": ".DB_IOT.ALLARME_FERMO"
},
"N3": {
"type": "DInt",
"comment": ".DB_IOT.WARNING_ATTIVO (che compromette produzione)"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.STATO_OPERATIVO (Semaforo)"
},
"N5": {
"type": "Int",
"comment": ".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
},
"N6": {
"type": "DInt",
"comment": ".DB_IOT.ALARM_STOP_NO"
},
"N7": {
"V1": {
"type": "DInt",
"comment": "PIECES_TOT"
},
"V2": {
"type": "DInt",
"comment": "PIECES_OK"
},
"V3": {
"type": "DInt",
"comment": "PIECES_KO_1"
},
"V4": {
"type": "DInt",
"comment": "PIECES_KO_2"
},
"V5": {
"type": "DInt",
"comment": "PIECES_KO_3"
},
"V6": {
"type": "DInt",
"comment": "PIECES_KO_4"
},
"V7": {
"type": "DInt",
"comment": "PIECES_KO_5"
},
"V8": {
"type": "DInt",
"comment": "PIECES_KO_6"
},
"V9": {
"type": "DInt",
"comment": "PIECES_KO_7"
},
"V10": {
"type": "DInt",
"comment": "PIECES_KO_8"
},
"V11": {
"type": "DInt",
"comment": "PIECES_KO_9"
},
"V12": {
"type": "DInt",
"comment": "PIECES_KO_10"
},
"V13": {
"type": "DInt",
"comment": "T_ALARM_HOURS"
},
"V14": {
"type": "DInt",
"comment": "T_DRY_CYCLE_HOURS"
},
"V15": {
"type": "DInt",
"comment": "T_POWERED_HOURS"
},
"V16": {
"type": "DInt",
"comment": "T_PRODUCT_100_HOURS"
},
"V17": {
"type": "DInt",
"comment": "T_PRODUCT_0_HOURS"
},
"V18": {
"type": "DInt",
"comment": "T_STOP_HOURS"
},
"V19": {
"type": "Int",
"comment": "T_ALARM_MINUTES"
},
"V20": {
"type": "Int",
"comment": "T_DRY_CYCLE_MINUTES"
},
"V21": {
"type": "Int",
"comment": "T_POWERED_MINUTES"
},
"V22": {
"type": "Int",
"comment": "T_PRODUCT_100_MINUTES"
},
"V23": {
"type": "Int",
"comment": "T_PRODUCT_0_MINUTES"
},
"V24": {
"type": "Int",
"comment": "T_STOP_MINUTES"
}
}
}
}
},
"SECT5": {
"type": "\"UDT SIPA SV Section\"",
"comment": "",
"is_udt_definition": true,
"fields": {
"Struct": {
"N1": {
"type": "DInt",
"comment": ".DB_IOT.STATO_MACCHINA"
},
"N2": {
"type": "DInt",
"comment": ".DB_IOT.ALLARME_FERMO"
},
"N3": {
"type": "DInt",
"comment": ".DB_IOT.WARNING_ATTIVO (che compromette produzione)"
},
"N4": {
"type": "Int",
"comment": ".DB_IOT.STATO_OPERATIVO (Semaforo)"
},
"N5": {
"type": "Int",
"comment": ".DB_IOT.MODO_OPERATIVO (Prod,Simula,Man, ecc)"
},
"N6": {
"type": "DInt",
"comment": ".DB_IOT.ALARM_STOP_NO"
},
"N7": {
"V1": {
"type": "DInt",
"comment": "PIECES_TOT"
},
"V2": {
"type": "DInt",
"comment": "PIECES_OK"
},
"V3": {
"type": "DInt",
"comment": "PIECES_KO_1"
},
"V4": {
"type": "DInt",
"comment": "PIECES_KO_2"
},
"V5": {
"type": "DInt",
"comment": "PIECES_KO_3"
},
"V6": {
"type": "DInt",
"comment": "PIECES_KO_4"
},
"V7": {
"type": "DInt",
"comment": "PIECES_KO_5"
},
"V8": {
"type": "DInt",
"comment": "PIECES_KO_6"
},
"V9": {
"type": "DInt",
"comment": "PIECES_KO_7"
},
"V10": {
"type": "DInt",
"comment": "PIECES_KO_8"
},
"V11": {
"type": "DInt",
"comment": "PIECES_KO_9"
},
"V12": {
"type": "DInt",
"comment": "PIECES_KO_10"
},
"V13": {
"type": "DInt",
"comment": "T_ALARM_HOURS"
},
"V14": {
"type": "DInt",
"comment": "T_DRY_CYCLE_HOURS"
},
"V15": {
"type": "DInt",
"comment": "T_POWERED_HOURS"
},
"V16": {
"type": "DInt",
"comment": "T_PRODUCT_100_HOURS"
},
"V17": {
"type": "DInt",
"comment": "T_PRODUCT_0_HOURS"
},
"V18": {
"type": "DInt",
"comment": "T_STOP_HOURS"
},
"V19": {
"type": "Int",
"comment": "T_ALARM_MINUTES"
},
"V20": {
"type": "Int",
"comment": "T_DRY_CYCLE_MINUTES"
},
"V21": {
"type": "Int",
"comment": "T_POWERED_MINUTES"
},
"V22": {
"type": "Int",
"comment": "T_PRODUCT_100_MINUTES"
},
"V23": {
"type": "Int",
"comment": "T_PRODUCT_0_MINUTES"
},
"V24": {
"type": "Int",
"comment": "T_STOP_MINUTES"
}
}
}
}
}
}
}
}
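
Not part of the diff: a minimal sketch of reading this structure back, assuming it was saved as DB_Structure.json. Each field is a dict carrying "type" and "comment"; expanded UDT instances keep their members under "fields", and array elements live under "Array".

import json

with open("DB_Structure.json", encoding="utf-8") as f:  # file name assumed
    dbs = json.load(f)

main = dbs["DB SIPA Supervision"]["Struct"]["MAIN"]
print(main["type"])                                 # '"UDT SIPA SV Main"' (embedded quotes are kept)
print(main["fields"]["Struct"]["N1"]["comment"])    # .DB_IOT.USERLEVEL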

Binary file not shown.

File diff suppressed because it is too large.

View File

@@ -1,185 +1,48 @@
import re
import os
import json
def clean_line(line):
"""Clean line from BOM and extra spaces or quotes."""
# Remove UTF-8 BOM if exists and strip trailing/leading whitespace
line = line.replace("\ufeff", "").strip()
# Standardize TYPE and DATA_BLOCK definitions to ensure they're properly captured
line = re.sub(r'\s*TYPE\s+"?', 'TYPE "', line)
line = re.sub(r'\s*DATA_BLOCK\s+"?', 'DATA_BLOCK "', line)
line = remove_text_inside_brackets(line)
return line
def remove_text_inside_brackets(text):
# Define the pattern to find text inside brackets
pattern = r"\{.*?\}"
# Use re.sub to replace the found text with an empty string
cleaned_text = re.sub(pattern, '', text)
return cleaned_text
def extract_name(line):
"""Extract the name from TYPE or DATA_BLOCK definition line."""
# Attempt to find a quoted name first
match = re.search(r'(TYPE|DATA_BLOCK)\s+"([^"]+)"', line)
if match:
return match.group(2).strip() # The name is within quotes
# If no quoted name, find an unquoted name
match = re.search(r"(TYPE|DATA_BLOCK)\s+(\S+)", line)
if match:
return match.group(2).strip() # The name is without quotes
def parse_udts(lines):
udt_json = {}
udt_name = None
nested_structs = []
current_struct = None
is_within_struct = False
for line in lines:
line = clean_line(line)
if "TYPE" in line and "END_TYPE" not in line:
udt_name = extract_name(line)
udt_json[udt_name] = {}
current_struct = udt_json[udt_name]
print(f"Created UDT: {udt_name}")
elif "END_TYPE" in line:
print(f"Completed UDT: {udt_name}")
udt_name = None
nested_structs = []
current_struct = None
is_within_struct = False
elif "STRUCT" in line and "END_STRUCT" not in line and udt_name is not None:
struct_name = (
"Struct" if "STRUCT" == line.strip() else line.split(":")[0].strip()
)
new_struct = {}
current_struct[struct_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
is_within_struct = True
print(f"Created STRUCT: {struct_name}")
elif "END_STRUCT" in line and udt_name is not None:
current_struct = nested_structs.pop() if nested_structs else None
is_within_struct = bool(nested_structs)
print(f"Closed STRUCT in UDT '{udt_name}'")
elif udt_name and ":" in line and is_within_struct:
parts = line.split(":")
field_name = parts[0].strip()
field_details = parts[1].strip().split("//")
field_type = (
field_details[0].replace(";", "").strip()
) # Removing ';' from field type
field_comment = parts[1].split("//")[1].strip() if "//" in parts[1] else ""
if "Struct" in field_type:
new_struct = {}
current_struct[field_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
print(f"Opened inline STRUCT at field '{field_name}'")
else:
current_struct[field_name] = {
"type": field_type,
"comment": field_comment,
}
print(
f"Added field '{field_name}' to STRUCT: Type={field_type}, Comment={field_comment}"
)
return udt_json
def parse_dbs(lines, udts):
db_json = {}
db_name = None
nested_structs = []
current_struct = None
is_within_struct = False
for line in lines:
line = clean_line(line)
if "DATA_BLOCK" in line and "END_DATA_BLOCK" not in line:
db_name = extract_name(line)
db_json[db_name] = {}
current_struct = db_json[db_name]
print(f"Created DATA_BLOCK: {db_name}")
elif "END_DATA_BLOCK" in line:
print(f"Completed DATA_BLOCK: {db_name}")
db_name = None
nested_structs = []
current_struct = None
is_within_struct = False
elif "STRUCT" in line and "END_STRUCT" not in line and db_name is not None:
struct_name = (
"Struct" if "STRUCT" == line.strip() else line.split(":")[0].strip()
)
new_struct = {}
current_struct[struct_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
is_within_struct = True
print(f"Created STRUCT in DB '{db_name}': {struct_name}")
elif "END_STRUCT" in line and db_name is not None:
current_struct = nested_structs.pop() if nested_structs else None
is_within_struct = bool(nested_structs)
print(f"Closed STRUCT in DB '{db_name}'")
elif db_name and ":" in line and is_within_struct:
parts = line.split(":")
field_name = parts[0].strip()
field_details = parts[1].strip().split("//")
field_type = (
field_details[0].replace(";", "").strip()
) # Removing ';' from field type
field_comment = parts[1].split("//")[1].strip() if "//" in parts[1] else ""
if "Struct" in field_type:
new_struct = {}
current_struct[field_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
print(f"Opened inline STRUCT at field '{field_name}' in DB '{db_name}'")
else:
current_struct[field_name] = {
"type": field_type,
"comment": field_comment,
}
print(
f"Added field '{field_name}' to STRUCT in DB '{db_name}': Type={field_type}, Comment={field_comment}"
)
return db_json
def save_data_as_json(file_path, data):
with open(file_path, 'w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
from CreateTable import *
from ExpandDB import *
from manejoArchivos import *
from CreateTableAndOffsets import convert_to_table
from ExpandDB import expand_dbs
from manejoArchivos import select_file, open_file_explorer
from FileSystem import *
from S7_DBParser import parse_dbs, parse_udts
from Excel import *
if __name__ == "__main__":
file_path = "Data_block_1.db" ## select_file()
file_path = select_file()
if file_path: # Proceed only if a file was selected
with open(file_path, "r", encoding="utf-8-sig") as file:
lines = file.readlines()
file_name, extension, dest_path = extract_file_details(file_path)
dest_path = create_directory(dest_path, file_name)
json_path = build_file_path(dest_path, file_name, "json")
xml_path = build_file_path(dest_path, file_name, "xml")
excel_path = build_file_path(dest_path, file_name, "xlsx")
csv_path = build_file_path(dest_path, file_name, "csv")
udt_json = parse_udts(lines)
db_json = parse_dbs(lines, udt_json)
expand_dbs(udt_json, db_json) # Expand DBs with UDT definitions
save_data_as_json("DB_Structure.jon", db_json)
save_data_as_json(db_json, json_path)
# Display the expanded DBs as a table
df = display_as_table(db_json)
save_dataframe_to_file(df) # Save the DataFrame to a CSV file
save_dataframe_to_excel(df,"DB_Structure.xlsx") # Optionally, save the DataFrame to an Excel file
df = convert_to_table(db_json)
save_dataframe_to_file(df, csv_path) # Save the DataFrame to a CSV file
## EXCEL output
save_dataframe_to_excel(
df, excel_path, file_name
) # Optionally, save the DataFrame to an Excel file
workbook, worksheet = open_worksheet(excel_path, file_name)
autosize_columns(worksheet, ["A", "B", "C", "D", "E", "F", "G"])
center_columns(worksheet, ["C", "D", "E"])
save_workbook(workbook, excel_path)
# Save JSON data to an XML file
save_json_to_xml(db_json)
save_json_to_xml(db_json, xml_path)
# Open the directory containing the new file in Explorer
##open_file_explorer(os.path.dirname(file_path))
open_file_explorer(os.path.dirname(excel_path))
else:
print("No file was selected.")

View File

@@ -62,7 +62,7 @@ NON_RETAIN
b2 : Int;
b3 : Array[0..15] of Bool;
b4 : Array[0..11] of Bool;
"Udt" { S7_SetPoint := 'False'} : "UDTd Complicada";
"Udt" { S7_SetPoint := 'False'} : "UDT Complicada";
udt_1 : "UDTComp";
END_STRUCT;

View File

@@ -0,0 +1,262 @@
Nombre,Tipo,Offset,Size,Level,Dirección PLC,Comentario
asa,Array[1..8] of Byte,0,0,0,DBX0.0,
asa[1],Byte,0,1,1,DBB0,
asa[2],Byte,1,1,1,DBB1,
asa[3],Byte,2,1,1,DBB2,
asa[4],Byte,3,1,1,DBB3,
asa[5],Byte,4,1,1,DBB4,
asa[6],Byte,5,1,1,DBB5,
asa[7],Byte,6,1,1,DBB6,
asa[8],Byte,7,1,1,DBB7,
b0,Bool,8,0.1,0,DBX8.0,
b1,Bool,8,0.1,0,DBX8.1,
s5,S5Time,10,2,0,DBW10,
s,LInt,12,8,0,DBX12.0,
s_1,WString,20,512,0,DBX20.0,
s_2,UInt,532,2,0,DBW532,
ss,LReal,534,8,0,DBX534.0,
b2,Int,542,2,0,DBW542,
b3,Array[0..15] of Bool,544,0,0,DBX544.0,
b3[0],Bool,544,0.1,1,DBX544.0,
b3[1],Bool,544,0.1,1,DBX544.1,
b3[2],Bool,544,0.1,1,DBX544.2,
b3[3],Bool,544,0.1,1,DBX544.3,
b3[4],Bool,544,0.1,1,DBX544.4,
b3[5],Bool,544,0.1,1,DBX544.5,
b3[6],Bool,544,0.1,1,DBX544.6,
b3[7],Bool,544,0.1,1,DBX544.7,
b3[8],Bool,545,0.1,1,DBX545.0,
b3[9],Bool,545,0.1,1,DBX545.1,
b3[10],Bool,545,0.1,1,DBX545.2,
b3[11],Bool,545,0.1,1,DBX545.3,
b3[12],Bool,545,0.1,1,DBX545.4,
b3[13],Bool,545,0.1,1,DBX545.5,
b3[14],Bool,545,0.1,1,DBX545.6,
b3[15],Bool,545,0.1,1,DBX545.7,
b4,Array[0..11] of Bool,546,0,0,DBX546.0,
b4[0],Bool,546,0.1,1,DBX546.0,
b4[1],Bool,546,0.1,1,DBX546.1,
b4[2],Bool,546,0.1,1,DBX546.2,
b4[3],Bool,546,0.1,1,DBX546.3,
b4[4],Bool,546,0.1,1,DBX546.4,
b4[5],Bool,546,0.1,1,DBX546.5,
b4[6],Bool,546,0.1,1,DBX546.6,
b4[7],Bool,546,0.1,1,DBX546.7,
b4[8],Bool,547,0.1,1,DBX547.0,
b4[9],Bool,547,0.1,1,DBX547.1,
b4[10],Bool,547,0.1,1,DBX547.2,
b4[11],Bool,547,0.1,1,DBX547.3,
"""Udt""","""UDT Complicada""",548,-1,0,DBX548.0,
"""Udt"".""String""",String,548,256,1,DBX548.0,
"""Udt"".String_1",String[60],804,62,1,DBX804.0,
"""Udt"".""Bool""",Bool,866,0.1,1,DBX866.0,
"""Udt"".Bool_1",Array[0..1] of Bool,868,0,1,DBX868.0,
"""Udt"".Bool_1[0]",Bool,868,0.1,2,DBX868.0,
"""Udt"".Bool_1[1]",Bool,868,0.1,2,DBX868.1,
"""Udt"".Bool_2",Array[0..51] of Bool,870,0,1,DBX870.0,
"""Udt"".Bool_2[0]",Bool,870,0.1,2,DBX870.0,
"""Udt"".Bool_2[1]",Bool,870,0.1,2,DBX870.1,
"""Udt"".Bool_2[2]",Bool,870,0.1,2,DBX870.2,
"""Udt"".Bool_2[3]",Bool,870,0.1,2,DBX870.3,
"""Udt"".Bool_2[4]",Bool,870,0.1,2,DBX870.4,
"""Udt"".Bool_2[5]",Bool,870,0.1,2,DBX870.5,
"""Udt"".Bool_2[6]",Bool,870,0.1,2,DBX870.6,
"""Udt"".Bool_2[7]",Bool,870,0.1,2,DBX870.7,
"""Udt"".Bool_2[8]",Bool,871,0.1,2,DBX871.0,
"""Udt"".Bool_2[9]",Bool,871,0.1,2,DBX871.1,
"""Udt"".Bool_2[10]",Bool,871,0.1,2,DBX871.2,
"""Udt"".Bool_2[11]",Bool,871,0.1,2,DBX871.3,
"""Udt"".Bool_2[12]",Bool,871,0.1,2,DBX871.4,
"""Udt"".Bool_2[13]",Bool,871,0.1,2,DBX871.5,
"""Udt"".Bool_2[14]",Bool,871,0.1,2,DBX871.6,
"""Udt"".Bool_2[15]",Bool,871,0.1,2,DBX871.7,
"""Udt"".Bool_2[16]",Bool,872,0.1,2,DBX872.0,
"""Udt"".Bool_2[17]",Bool,872,0.1,2,DBX872.1,
"""Udt"".Bool_2[18]",Bool,872,0.1,2,DBX872.2,
"""Udt"".Bool_2[19]",Bool,872,0.1,2,DBX872.3,
"""Udt"".Bool_2[20]",Bool,872,0.1,2,DBX872.4,
"""Udt"".Bool_2[21]",Bool,872,0.1,2,DBX872.5,
"""Udt"".Bool_2[22]",Bool,872,0.1,2,DBX872.6,
"""Udt"".Bool_2[23]",Bool,872,0.1,2,DBX872.7,
"""Udt"".Bool_2[24]",Bool,873,0.1,2,DBX873.0,
"""Udt"".Bool_2[25]",Bool,873,0.1,2,DBX873.1,
"""Udt"".Bool_2[26]",Bool,873,0.1,2,DBX873.2,
"""Udt"".Bool_2[27]",Bool,873,0.1,2,DBX873.3,
"""Udt"".Bool_2[28]",Bool,873,0.1,2,DBX873.4,
"""Udt"".Bool_2[29]",Bool,873,0.1,2,DBX873.5,
"""Udt"".Bool_2[30]",Bool,873,0.1,2,DBX873.6,
"""Udt"".Bool_2[31]",Bool,873,0.1,2,DBX873.7,
"""Udt"".Bool_2[32]",Bool,874,0.1,2,DBX874.0,
"""Udt"".Bool_2[33]",Bool,874,0.1,2,DBX874.1,
"""Udt"".Bool_2[34]",Bool,874,0.1,2,DBX874.2,
"""Udt"".Bool_2[35]",Bool,874,0.1,2,DBX874.3,
"""Udt"".Bool_2[36]",Bool,874,0.1,2,DBX874.4,
"""Udt"".Bool_2[37]",Bool,874,0.1,2,DBX874.5,
"""Udt"".Bool_2[38]",Bool,874,0.1,2,DBX874.6,
"""Udt"".Bool_2[39]",Bool,874,0.1,2,DBX874.7,
"""Udt"".Bool_2[40]",Bool,875,0.1,2,DBX875.0,
"""Udt"".Bool_2[41]",Bool,875,0.1,2,DBX875.1,
"""Udt"".Bool_2[42]",Bool,875,0.1,2,DBX875.2,
"""Udt"".Bool_2[43]",Bool,875,0.1,2,DBX875.3,
"""Udt"".Bool_2[44]",Bool,875,0.1,2,DBX875.4,
"""Udt"".Bool_2[45]",Bool,875,0.1,2,DBX875.5,
"""Udt"".Bool_2[46]",Bool,875,0.1,2,DBX875.6,
"""Udt"".Bool_2[47]",Bool,875,0.1,2,DBX875.7,
"""Udt"".Bool_2[48]",Bool,876,0.1,2,DBX876.0,
"""Udt"".Bool_2[49]",Bool,876,0.1,2,DBX876.1,
"""Udt"".Bool_2[50]",Bool,876,0.1,2,DBX876.2,
"""Udt"".Bool_2[51]",Bool,876,0.1,2,DBX876.3,
"""Udt"".""Int""",Int,878,2,1,DBW878,
"""Udt"".""Hola como Estas""",Int,880,2,1,DBW880,
"""Udt"".""No se""",Int,882,2,1,DBW882,
"""Udt"".""Real""",Array[0..10] of Real,884,0,1,DBX884.0,
"""Udt"".""Real""[0]",Real,884,4,2,DBD884,
"""Udt"".""Real""[1]",Real,888,4,2,DBD888,
"""Udt"".""Real""[2]",Real,892,4,2,DBD892,
"""Udt"".""Real""[3]",Real,896,4,2,DBD896,
"""Udt"".""Real""[4]",Real,900,4,2,DBD900,
"""Udt"".""Real""[5]",Real,904,4,2,DBD904,
"""Udt"".""Real""[6]",Real,908,4,2,DBD908,
"""Udt"".""Real""[7]",Real,912,4,2,DBD912,
"""Udt"".""Real""[8]",Real,916,4,2,DBD916,
"""Udt"".""Real""[9]",Real,920,4,2,DBD920,
"""Udt"".""Real""[10]",Real,924,4,2,DBD924,
"""Udt"".Bool_3",Bool,928,0.1,1,DBX928.0,
"""Udt"".Bool_4",Bool,928,0.1,1,DBX928.1,
"""Udt"".Bool_5",Array[0..7] of Bool,930,0,1,DBX930.0,
"""Udt"".Bool_5[0]",Bool,930,0.1,2,DBX930.0,
"""Udt"".Bool_5[1]",Bool,930,0.1,2,DBX930.1,
"""Udt"".Bool_5[2]",Bool,930,0.1,2,DBX930.2,
"""Udt"".Bool_5[3]",Bool,930,0.1,2,DBX930.3,
"""Udt"".Bool_5[4]",Bool,930,0.1,2,DBX930.4,
"""Udt"".Bool_5[5]",Bool,930,0.1,2,DBX930.5,
"""Udt"".Bool_5[6]",Bool,930,0.1,2,DBX930.6,
"""Udt"".Bool_5[7]",Bool,930,0.1,2,DBX930.7,
"""Udt"".Bool_6",Array[0..15] of Bool,932,0,1,DBX932.0,
"""Udt"".Bool_6[0]",Bool,932,0.1,2,DBX932.0,
"""Udt"".Bool_6[1]",Bool,932,0.1,2,DBX932.1,
"""Udt"".Bool_6[2]",Bool,932,0.1,2,DBX932.2,
"""Udt"".Bool_6[3]",Bool,932,0.1,2,DBX932.3,
"""Udt"".Bool_6[4]",Bool,932,0.1,2,DBX932.4,
"""Udt"".Bool_6[5]",Bool,932,0.1,2,DBX932.5,
"""Udt"".Bool_6[6]",Bool,932,0.1,2,DBX932.6,
"""Udt"".Bool_6[7]",Bool,932,0.1,2,DBX932.7,
"""Udt"".Bool_6[8]",Bool,933,0.1,2,DBX933.0,
"""Udt"".Bool_6[9]",Bool,933,0.1,2,DBX933.1,
"""Udt"".Bool_6[10]",Bool,933,0.1,2,DBX933.2,
"""Udt"".Bool_6[11]",Bool,933,0.1,2,DBX933.3,
"""Udt"".Bool_6[12]",Bool,933,0.1,2,DBX933.4,
"""Udt"".Bool_6[13]",Bool,933,0.1,2,DBX933.5,
"""Udt"".Bool_6[14]",Bool,933,0.1,2,DBX933.6,
"""Udt"".Bool_6[15]",Bool,933,0.1,2,DBX933.7,
"""Udt"".Int_1",Int,934,2,1,DBW934,
"""Udt"".""Time""",Time,936,4,1,DBD936,
"""Udt"".Time_of",Time_Of_Day,940,4,1,DBD940,
"""Udt"".""Word""",Word,944,2,1,DBW944,
udt_1,"""UDTComp""",946,-1,0,DBX946.0,
"udt_1.""String""",String,946,256,1,DBX946.0,
udt_1.String_1,String[60],1202,62,1,DBX1202.0,
"udt_1.""Bool""",Bool,1264,0.1,1,DBX1264.0,
udt_1.Bool_1,Array[0..1] of Bool,1266,0,1,DBX1266.0,
udt_1.Bool_1[0],Bool,1266,0.1,2,DBX1266.0,
udt_1.Bool_1[1],Bool,1266,0.1,2,DBX1266.1,
udt_1.Bool_2,Array[0..51] of Bool,1268,0,1,DBX1268.0,
udt_1.Bool_2[0],Bool,1268,0.1,2,DBX1268.0,
udt_1.Bool_2[1],Bool,1268,0.1,2,DBX1268.1,
udt_1.Bool_2[2],Bool,1268,0.1,2,DBX1268.2,
udt_1.Bool_2[3],Bool,1268,0.1,2,DBX1268.3,
udt_1.Bool_2[4],Bool,1268,0.1,2,DBX1268.4,
udt_1.Bool_2[5],Bool,1268,0.1,2,DBX1268.5,
udt_1.Bool_2[6],Bool,1268,0.1,2,DBX1268.6,
udt_1.Bool_2[7],Bool,1268,0.1,2,DBX1268.7,
udt_1.Bool_2[8],Bool,1269,0.1,2,DBX1269.0,
udt_1.Bool_2[9],Bool,1269,0.1,2,DBX1269.1,
udt_1.Bool_2[10],Bool,1269,0.1,2,DBX1269.2,
udt_1.Bool_2[11],Bool,1269,0.1,2,DBX1269.3,
udt_1.Bool_2[12],Bool,1269,0.1,2,DBX1269.4,
udt_1.Bool_2[13],Bool,1269,0.1,2,DBX1269.5,
udt_1.Bool_2[14],Bool,1269,0.1,2,DBX1269.6,
udt_1.Bool_2[15],Bool,1269,0.1,2,DBX1269.7,
udt_1.Bool_2[16],Bool,1270,0.1,2,DBX1270.0,
udt_1.Bool_2[17],Bool,1270,0.1,2,DBX1270.1,
udt_1.Bool_2[18],Bool,1270,0.1,2,DBX1270.2,
udt_1.Bool_2[19],Bool,1270,0.1,2,DBX1270.3,
udt_1.Bool_2[20],Bool,1270,0.1,2,DBX1270.4,
udt_1.Bool_2[21],Bool,1270,0.1,2,DBX1270.5,
udt_1.Bool_2[22],Bool,1270,0.1,2,DBX1270.6,
udt_1.Bool_2[23],Bool,1270,0.1,2,DBX1270.7,
udt_1.Bool_2[24],Bool,1271,0.1,2,DBX1271.0,
udt_1.Bool_2[25],Bool,1271,0.1,2,DBX1271.1,
udt_1.Bool_2[26],Bool,1271,0.1,2,DBX1271.2,
udt_1.Bool_2[27],Bool,1271,0.1,2,DBX1271.3,
udt_1.Bool_2[28],Bool,1271,0.1,2,DBX1271.4,
udt_1.Bool_2[29],Bool,1271,0.1,2,DBX1271.5,
udt_1.Bool_2[30],Bool,1271,0.1,2,DBX1271.6,
udt_1.Bool_2[31],Bool,1271,0.1,2,DBX1271.7,
udt_1.Bool_2[32],Bool,1272,0.1,2,DBX1272.0,
udt_1.Bool_2[33],Bool,1272,0.1,2,DBX1272.1,
udt_1.Bool_2[34],Bool,1272,0.1,2,DBX1272.2,
udt_1.Bool_2[35],Bool,1272,0.1,2,DBX1272.3,
udt_1.Bool_2[36],Bool,1272,0.1,2,DBX1272.4,
udt_1.Bool_2[37],Bool,1272,0.1,2,DBX1272.5,
udt_1.Bool_2[38],Bool,1272,0.1,2,DBX1272.6,
udt_1.Bool_2[39],Bool,1272,0.1,2,DBX1272.7,
udt_1.Bool_2[40],Bool,1273,0.1,2,DBX1273.0,
udt_1.Bool_2[41],Bool,1273,0.1,2,DBX1273.1,
udt_1.Bool_2[42],Bool,1273,0.1,2,DBX1273.2,
udt_1.Bool_2[43],Bool,1273,0.1,2,DBX1273.3,
udt_1.Bool_2[44],Bool,1273,0.1,2,DBX1273.4,
udt_1.Bool_2[45],Bool,1273,0.1,2,DBX1273.5,
udt_1.Bool_2[46],Bool,1273,0.1,2,DBX1273.6,
udt_1.Bool_2[47],Bool,1273,0.1,2,DBX1273.7,
udt_1.Bool_2[48],Bool,1274,0.1,2,DBX1274.0,
udt_1.Bool_2[49],Bool,1274,0.1,2,DBX1274.1,
udt_1.Bool_2[50],Bool,1274,0.1,2,DBX1274.2,
udt_1.Bool_2[51],Bool,1274,0.1,2,DBX1274.3,
"udt_1.""Int""",Int,1276,2,1,DBW1276,
"udt_1.""Hola como Estas""",Int,1278,2,1,DBW1278,
"udt_1.""No se""",Int,1280,2,1,DBW1280,
"udt_1.""Real""",Array[0..10] of Real,1282,0,1,DBX1282.0,
"udt_1.""Real""[0]",Real,1282,4,2,DBD1282,
"udt_1.""Real""[1]",Real,1286,4,2,DBD1286,
"udt_1.""Real""[2]",Real,1290,4,2,DBD1290,
"udt_1.""Real""[3]",Real,1294,4,2,DBD1294,
"udt_1.""Real""[4]",Real,1298,4,2,DBD1298,
"udt_1.""Real""[5]",Real,1302,4,2,DBD1302,
"udt_1.""Real""[6]",Real,1306,4,2,DBD1306,
"udt_1.""Real""[7]",Real,1310,4,2,DBD1310,
"udt_1.""Real""[8]",Real,1314,4,2,DBD1314,
"udt_1.""Real""[9]",Real,1318,4,2,DBD1318,
"udt_1.""Real""[10]",Real,1322,4,2,DBD1322,
udt_1.Bool_3,Bool,1326,0.1,1,DBX1326.0,
udt_1.Bool_4,Bool,1326,0.1,1,DBX1326.1,
udt_1.Bool_5,Array[0..7] of Bool,1328,0,1,DBX1328.0,
udt_1.Bool_5[0],Bool,1328,0.1,2,DBX1328.0,
udt_1.Bool_5[1],Bool,1328,0.1,2,DBX1328.1,
udt_1.Bool_5[2],Bool,1328,0.1,2,DBX1328.2,
udt_1.Bool_5[3],Bool,1328,0.1,2,DBX1328.3,
udt_1.Bool_5[4],Bool,1328,0.1,2,DBX1328.4,
udt_1.Bool_5[5],Bool,1328,0.1,2,DBX1328.5,
udt_1.Bool_5[6],Bool,1328,0.1,2,DBX1328.6,
udt_1.Bool_5[7],Bool,1328,0.1,2,DBX1328.7,
udt_1.Bool_6,Array[0..15] of Bool,1330,0,1,DBX1330.0,
udt_1.Bool_6[0],Bool,1330,0.1,2,DBX1330.0,
udt_1.Bool_6[1],Bool,1330,0.1,2,DBX1330.1,
udt_1.Bool_6[2],Bool,1330,0.1,2,DBX1330.2,
udt_1.Bool_6[3],Bool,1330,0.1,2,DBX1330.3,
udt_1.Bool_6[4],Bool,1330,0.1,2,DBX1330.4,
udt_1.Bool_6[5],Bool,1330,0.1,2,DBX1330.5,
udt_1.Bool_6[6],Bool,1330,0.1,2,DBX1330.6,
udt_1.Bool_6[7],Bool,1330,0.1,2,DBX1330.7,
udt_1.Bool_6[8],Bool,1331,0.1,2,DBX1331.0,
udt_1.Bool_6[9],Bool,1331,0.1,2,DBX1331.1,
udt_1.Bool_6[10],Bool,1331,0.1,2,DBX1331.2,
udt_1.Bool_6[11],Bool,1331,0.1,2,DBX1331.3,
udt_1.Bool_6[12],Bool,1331,0.1,2,DBX1331.4,
udt_1.Bool_6[13],Bool,1331,0.1,2,DBX1331.5,
udt_1.Bool_6[14],Bool,1331,0.1,2,DBX1331.6,
udt_1.Bool_6[15],Bool,1331,0.1,2,DBX1331.7,
udt_1.Int_1,Int,1332,2,1,DBW1332,
"udt_1.""Time""",Time,1334,4,1,DBD1334,
udt_1.Time_of,Time_Of_Day,1338,4,1,DBD1338,
"udt_1.""Word""",Word,1342,2,1,DBW1342,

File diff suppressed because it is too large.

Binary file not shown.

File diff suppressed because it is too large.

Excel.py Normal file, 90 lines
View File

@@ -0,0 +1,90 @@
from openpyxl.styles import Alignment
def center_columns(worksheet, columns):
"""
Center the content of specified columns in a worksheet.
Parameters:
worksheet: The worksheet object from openpyxl.
columns (list of str): List of column letters to be centered, e.g., ['B', 'D', 'E'].
"""
for col_letter in columns:
for cell in worksheet[col_letter]:
cell.alignment = Alignment(horizontal='center')
from openpyxl import Workbook
from openpyxl.utils import get_column_letter
def autosize_columns(worksheet, columns):
"""
Adjust the column width to the maximum content size of specified columns in a worksheet.
Parameters:
worksheet: The worksheet object from openpyxl.
columns (list of str): List of column letters to be resized, e.g., ['A', 'C', 'F'].
"""
for col_letter in columns:
max_length = 0
col = worksheet[col_letter]
for cell in col:
try:
if len(str(cell.value)) > max_length:
max_length = len(str(cell.value))
except:
pass
adjusted_width = (max_length + 2) * 1.2 # Adding a little extra width
worksheet.column_dimensions[col_letter].width = adjusted_width
from openpyxl import load_workbook
def open_worksheet(file_path, sheet_name):
"""
Opens an Excel file and returns a specific worksheet by name.
Parameters:
file_path (str): The path to the Excel file.
sheet_name (str): The name of the worksheet to load.
Returns:
tuple: (workbook, worksheet), or (None, None) if the file or sheet does not exist.
"""
try:
# Load the workbook from the specified file
workbook = load_workbook(filename=file_path)
# Try to get the worksheet by name
if sheet_name in workbook.sheetnames:
worksheet = workbook[sheet_name]
return workbook, worksheet
else:
print(f"Sheet named '{sheet_name}' does not exist in the workbook.")
return None, None
except FileNotFoundError:
print(f"No such file: '{file_path}'")
return None, None
except Exception as e:
print(f"An error occurred: {e}")
return None, None
def save_workbook(workbook, file_path):
"""
Saves the modified workbook to a specified file path.
Parameters:
workbook: The openpyxl workbook object that has been modified.
file_path (str): The file path where the workbook should be saved.
Returns:
bool: True if the workbook was saved successfully, False otherwise.
"""
try:
workbook.save(file_path)
print(f"Workbook saved successfully to '{file_path}'.")
return True
except Exception as e:
print(f"Failed to save the workbook. Error: {e}")
return False
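
For reference, a minimal usage sketch of these helpers outside the main script (not part of the diff; the workbook path and the "DB" sheet name are assumptions matching the defaults used elsewhere in the repo):

from Excel import open_worksheet, autosize_columns, center_columns, save_workbook

workbook, worksheet = open_worksheet("DB_Structure.xlsx", "DB")
if worksheet is not None:
    # columns A..G hold Nombre, Tipo, Offset, Size, Level, Dirección PLC, Comentario
    autosize_columns(worksheet, ["A", "B", "C", "D", "E", "F", "G"])
    center_columns(worksheet, ["C", "D", "E"])   # center Offset, Size and Level
    save_workbook(workbook, "DB_Structure.xlsx")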

FileSystem.py Normal file, 114 lines
View File

@@ -0,0 +1,114 @@
import os
import xmltodict
def extract_file_details(file_path):
"""
Extracts and returns the file name without extension, the file extension, and the file path.
Parameters:
file_path (str): The full path to the file.
Returns:
tuple: (file_name_without_extension, file_extension, path_only)
Usage:
file_name, extension, path = extract_file_details(file_path)
"""
# Extract the directory part of the path
path_only = os.path.dirname(file_path)
# Extract the full file name, including its extension
full_file_name = os.path.basename(file_path)
# Split the extension off the file name
file_name_without_extension, file_extension = os.path.splitext(full_file_name)
return (file_name_without_extension, file_extension, path_only)
def create_directory(base_path, additional_path=""):
"""
Creates a directory at the specified base path, optionally extended by an additional path.
This function handles paths whether or not they end with a slash.
Parameters:
base_path (str): The base path of the directory to create.
additional_path (str, optional): Additional path elements to be appended. Default is empty.
Returns:
str: The full directory path (created if it did not already exist; returned even if creation failed).
"""
# Build the full path, making sure the components are joined correctly
full_path = os.path.join(base_path, additional_path)
try:
# Try to create the directory, including any intermediate directories needed
os.makedirs(full_path, exist_ok=True)
print(f"Directory '{full_path}' created successfully.")
return full_path
except Exception as e:
# Catch and report any error that occurs
print(f"Failed to create directory '{full_path}'. Error: {e}")
return full_path
def build_file_path(base_path, file_name, extension):
"""
Constructs a complete file path given a base path, a file name, and an extension.
Handles cases where the file name may include an incorrect or correct extension.
Parameters:
base_path (str): The base directory path where the file will be located.
file_name (str): The file name, which may or may not include an extension.
extension (str): The desired file extension (e.g., 'txt', '.txt').
Returns:
str: The complete file path including the base path, file name, and extension.
"""
# Ensure the extension is in the correct format (i.e., starts with a dot)
if not extension.startswith('.'):
extension = '.' + extension
# Separate the base file name from its extension if present
file_name_without_extension, _ = os.path.splitext(file_name)
# Reconstruct the file name with the correct extension
file_name_corrected = file_name_without_extension + extension
# Construct the full file path
full_path = os.path.join(base_path, file_name_corrected)
return full_path
import json
def save_data_as_json(data, file_path):
with open(file_path, 'w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
def save_json_to_xml(json_data, filename="DB_Structure.xml"):
"""
Convert JSON data to XML and save it to a file.
"""
xml_data = xmltodict.unparse({"root": json_data}, pretty=True)
with open(filename, "w") as xml_file:
xml_file.write(xml_data)
print(f"XML data saved to {filename}")
def save_dataframe_to_excel(df, filename="DB_Structure.xlsx", sheet_name="DB"):
"""
Save the provided DataFrame to an Excel file.
"""
df.to_excel(filename, index=False, sheet_name=sheet_name)
print(f"Data saved to {filename}")
def save_dataframe_to_file(df, filename="DB_Structure.csv"):
"""
Save the provided DataFrame to a CSV file.
"""
df.to_csv(filename, index=False)
print(f"Data saved to {filename}")

S7_DBParser.py Normal file, 152 lines
View File

@@ -0,0 +1,152 @@
import re
import os
import json
def clean_line(line):
"""Clean line from BOM and extra spaces or quotes."""
# Remove UTF-8 BOM if exists and strip trailing/leading whitespace
line = line.replace("\ufeff", "").strip()
# Standardize TYPE and DATA_BLOCK definitions to ensure they're properly captured
line = re.sub(r'\s*TYPE\s+"?', 'TYPE "', line)
line = re.sub(r'\s*DATA_BLOCK\s+"?', 'DATA_BLOCK "', line)
line = remove_text_inside_brackets(line)
return line
def remove_text_inside_brackets(text):
# Define the pattern to find text inside brackets
pattern = r"\{.*?\}"
# Use re.sub to replace the found text with an empty string
cleaned_text = re.sub(pattern, '', text)
return cleaned_text
def extract_name(line):
"""Extract the name from TYPE or DATA_BLOCK definition line."""
# Attempt to find a quoted name first
match = re.search(r'(TYPE|DATA_BLOCK)\s+"([^"]+)"', line)
if match:
return match.group(2).strip() # The name is within quotes
# If no quoted name, find an unquoted name
match = re.search(r"(TYPE|DATA_BLOCK)\s+(\S+)", line)
if match:
return match.group(2).strip() # The name is without quotes
def parse_udts(lines):
udt_json = {}
udt_name = None
nested_structs = []
current_struct = None
is_within_struct = False
for line in lines:
line = clean_line(line)
if "TYPE" in line and "END_TYPE" not in line:
udt_name = extract_name(line)
udt_json[udt_name] = {}
current_struct = udt_json[udt_name]
print(f"Created UDT: {udt_name}")
elif "END_TYPE" in line:
print(f"Completed UDT: {udt_name}")
udt_name = None
nested_structs = []
current_struct = None
is_within_struct = False
elif "STRUCT" in line and "END_STRUCT" not in line and udt_name is not None:
struct_name = (
"Struct" if "STRUCT" == line.strip() else line.split(":")[0].strip()
)
new_struct = {}
current_struct[struct_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
is_within_struct = True
print(f"Created STRUCT: {struct_name}")
elif "END_STRUCT" in line and udt_name is not None:
current_struct = nested_structs.pop() if nested_structs else None
is_within_struct = bool(nested_structs)
print(f"Closed STRUCT in UDT '{udt_name}'")
elif udt_name and ":" in line and is_within_struct:
parts = line.split(":")
field_name = parts[0].strip()
field_details = parts[1].strip().split("//")
field_type = (
field_details[0].replace(";", "").strip()
) # Removing ';' from field type
field_comment = parts[1].split("//")[1].strip() if "//" in parts[1] else ""
if "Struct" in field_type:
new_struct = {}
current_struct[field_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
print(f"Opened inline STRUCT at field '{field_name}'")
else:
current_struct[field_name] = {
"type": field_type,
"comment": field_comment,
}
print(
f"Added field '{field_name}' to STRUCT: Type={field_type}, Comment={field_comment}"
)
return udt_json
def parse_dbs(lines, udts):
db_json = {}
db_name = None
nested_structs = []
current_struct = None
is_within_struct = False
for line in lines:
line = clean_line(line)
if "DATA_BLOCK" in line and "END_DATA_BLOCK" not in line:
db_name = extract_name(line)
db_json[db_name] = {}
current_struct = db_json[db_name]
print(f"Created DATA_BLOCK: {db_name}")
elif "END_DATA_BLOCK" in line:
print(f"Completed DATA_BLOCK: {db_name}")
db_name = None
nested_structs = []
current_struct = None
is_within_struct = False
elif "STRUCT" in line and "END_STRUCT" not in line and db_name is not None:
struct_name = (
"Struct" if "STRUCT" == line.strip() else line.split(":")[0].strip()
)
new_struct = {}
current_struct[struct_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
is_within_struct = True
print(f"Created STRUCT in DB '{db_name}': {struct_name}")
elif "END_STRUCT" in line and db_name is not None:
current_struct = nested_structs.pop() if nested_structs else None
is_within_struct = bool(nested_structs)
print(f"Closed STRUCT in DB '{db_name}'")
elif db_name and ":" in line and is_within_struct:
parts = line.split(":")
field_name = parts[0].strip()
field_details = parts[1].strip().split("//")
field_type = (
field_details[0].replace(";", "").strip()
) # Removing ';' from field type
field_comment = parts[1].split("//")[1].strip() if "//" in parts[1] else ""
if "Struct" in field_type:
new_struct = {}
current_struct[field_name] = new_struct
nested_structs.append(current_struct)
current_struct = new_struct
print(f"Opened inline STRUCT at field '{field_name}' in DB '{db_name}'")
else:
current_struct[field_name] = {
"type": field_type,
"comment": field_comment,
}
print(
f"Added field '{field_name}' to STRUCT in DB '{db_name}': Type={field_type}, Comment={field_comment}"
)
return db_json
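
A minimal round-trip sketch of the parser on an inline, hypothetical DB source (same shape as the .db exports this commit processes):

from S7_DBParser import parse_udts, parse_dbs

lines = [
    'DATA_BLOCK "DB Demo"',
    'STRUCT',
    '   counter : DInt;   // pieces produced',
    '   running : Bool;',
    'END_STRUCT;',
    'END_DATA_BLOCK',
]
udts = parse_udts(lines)        # empty here: the snippet has no TYPE sections
dbs = parse_dbs(lines, udts)
# dbs == {'DB Demo': {'Struct': {'counter': {'type': 'DInt', 'comment': 'pieces produced'},
#                                'running': {'type': 'Bool', 'comment': ''}}}}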

Binary file not shown.