First Version

commit 68cdc4c6ae

@@ -0,0 +1,174 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc
File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -0,0 +1,186 @@
# Project Hardware & IO Summary (from CAx Export)

## Network Configuration
| Parent Device | Interface/Node Name | Type | Address (IP/DP) |
|---|---|---|---|
| SAE196_c0.2.XML | PB1 | Profibus | 12 |
| SAE196_c0.2.XML | PB1 | Profibus | 20 |
| SAE196_c0.2.XML | PB1 | Profibus | 21 |
| SAE196_c0.2.XML | PB1 | Profibus | 22 |
| SAE196_c0.2.XML | PB1 | Profibus | 10 |
| SAE196_c0.2.XML | PB1 | Profibus | 8 |
| SAE196_c0.2.XML | PB1 | Profibus | 40 |
| SAE196_c0.2.XML | E1 | Ethernet | 192.168.1.1 |
| SAE196_c0.2.XML | E1 | Ethernet | 192.168.1.1 |
| SAE196_c0.2.XML | E1 | Ethernet | 10.1.33.11 |
| SAE196_c0.2.XML | E1 | Ethernet | 10.1.33.11 |
| SAE196_c0.2.XML | P1 | Profibus | 1 |
| Ungrouped devices | PB1 | Profibus | 12 |
| Ungrouped devices | PB1 | Profibus | 20 |
| Ungrouped devices | PB1 | Profibus | 21 |
| Ungrouped devices | PB1 | Profibus | 22 |
| Ungrouped devices | PB1 | Profibus | 10 |
| Ungrouped devices | PB1 | Profibus | 8 |
| Ungrouped devices | PB1 | Profibus | 40 |
| GSD device_10 | PB1 | Profibus | 12 |
| Telaio di montaggio_0 | PB1 | Profibus | 12 |
| Festo_283E1 | PB1 | Profibus | 12 |
| DP interface | PB1 | Profibus | 12 |
| GSD device_11 | PB1 | Profibus | 20 |
| Telaio di montaggio_0 | PB1 | Profibus | 20 |
| Water_Volumetric_251U1 | PB1 | Profibus | 20 |
| DP interface | PB1 | Profibus | 20 |
| GSD device_12 | PB1 | Profibus | 21 |
| Telaio di montaggio_0 | PB1 | Profibus | 21 |
| Syrup_Mass_252U1 | PB1 | Profibus | 21 |
| DP interface | PB1 | Profibus | 21 |
| GSD device_13 | PB1 | Profibus | 22 |
| Telaio di montaggio_0 | PB1 | Profibus | 22 |
| CO2_Mass_253U1 | PB1 | Profibus | 22 |
| DP interface | PB1 | Profibus | 22 |
| ET 200SP station_2 | PB1 | Profibus | 10 |
| Telaio di montaggio_0 | PB1 | Profibus | 10 |
| IM151_280A0 | PB1 | Profibus | 10 |
| DP interface | PB1 | Profibus | 10 |
| Dispositivo GSD_2 | PB1 | Profibus | 8 |
| Rack_0 | PB1 | Profibus | 8 |
| 045U1 | PB1 | Profibus | 8 |
| DP interface | PB1 | Profibus | 8 |
| GSD device_1 | PB1 | Profibus | 40 |
| Rack_0 | PB1 | Profibus | 40 |
| Anton Paar_120U1 | PB1 | Profibus | 40 |
| DP interface | PB1 | Profibus | 40 |
| S71500/ET200MP station_1 | E1 | Ethernet | 192.168.1.1 |
| S71500/ET200MP station_1 | E1 | Ethernet | 192.168.1.1 |
| S71500/ET200MP station_1 | E1 | Ethernet | 10.1.33.11 |
| S71500/ET200MP station_1 | E1 | Ethernet | 10.1.33.11 |
| S71500/ET200MP station_1 | P1 | Profibus | 1 |
| Telaio di montaggio_0 | E1 | Ethernet | 192.168.1.1 |
| Telaio di montaggio_0 | E1 | Ethernet | 192.168.1.1 |
| Telaio di montaggio_0 | E1 | Ethernet | 10.1.33.11 |
| Telaio di montaggio_0 | E1 | Ethernet | 10.1.33.11 |
| Telaio di montaggio_0 | P1 | Profibus | 1 |
| PLC | E1 | Ethernet | 192.168.1.1 |
| PLC | E1 | Ethernet | 192.168.1.1 |
| PLC | E1 | Ethernet | 10.1.33.11 |
| PLC | E1 | Ethernet | 10.1.33.11 |
| PLC | P1 | Profibus | 1 |
| Interfaccia PROFINET_1 | E1 | Ethernet | 192.168.1.1 |
| Interfaccia PROFINET_1 | E1 | Ethernet | 192.168.1.1 |
| Interfaccia PROFINET_2 | E1 | Ethernet | 10.1.33.11 |
| Interfaccia PROFINET_2 | E1 | Ethernet | 10.1.33.11 |
| Interfaccia DP_1 | P1 | Profibus | 1 |

## I/O Modules & Addresses
| Module Name | Type Name | Order Number | Slot/Pos | Logical Addresses |
|---|---|---|---|---|
| SAE196_c0.2.XML | CPU 1516F-3 PN/DP | 6ES7 516-3FP03-0AB0 | N/A | Input Start:501 Len:8 (Area:1)<br>Output Start:16 Len:8 (Area:1)<br>Output Start:17 Len:8 (Area:1)<br>Output Start:18 Len:8 (Area:1)<br>Output Start:19 Len:8 (Area:1)<br>Output Start:20 Len:8 (Area:1)<br>Input Start:3080 Len:40 (Area:1)<br>Input Start:3100 Len:40 (Area:1)<br>Output Start:3100 Len:8 (Area:2)<br>Input Start:2030 Len:40 (Area:1)<br>Input Start:2045 Len:40 (Area:1)<br>Input Start:2050 Len:40 (Area:1)<br>Input Start:2055 Len:40 (Area:1)<br>Input Start:2070 Len:40 (Area:1)<br>Output Start:2070 Len:8 (Area:2)<br>Input Start:3200 Len:40 (Area:1)<br>Input Start:3215 Len:40 (Area:1)<br>Input Start:3225 Len:40 (Area:1)<br>Input Start:3240 Len:40 (Area:1)<br>Output Start:3240 Len:8 (Area:2)<br>Input Start:0 Len:8 (Area:1)<br>Input Start:1 Len:8 (Area:1)<br>Input Start:2 Len:8 (Area:1)<br>Input Start:3 Len:8 (Area:1)<br>Input Start:4 Len:8 (Area:1)<br>Input Start:5 Len:8 (Area:1)<br>Input Start:6 Len:8 (Area:1)<br>Input Start:7 Len:8 (Area:1)<br>Output Start:0 Len:8 (Area:1)<br>Output Start:1 Len:8 (Area:1)<br>Output Start:2 Len:8 (Area:1)<br>Output Start:3 Len:8 (Area:1)<br>Output Start:4 Len:8 (Area:1)<br>Output Start:5 Len:8 (Area:1)<br>Output Start:6 Len:8 (Area:1)<br>Output Start:7 Len:8 (Area:1)<br>Input Start:100 Len:32 (Area:1)<br>Input Start:104 Len:32 (Area:1)<br>Input Start:108 Len:32 (Area:1)<br>Input Start:112 Len:32 (Area:1)<br>Input Start:116 Len:32 (Area:1)<br>Output Start:100 Len:32 (Area:1)<br>Output Start:104 Len:32 (Area:1)<br>Output Start:108 Len:32 (Area:1)<br>Output Start:112 Len:32 (Area:1)<br>Output Start:116 Len:32 (Area:1)<br>Output Start:120 Len:32 (Area:1)<br>Input Start:1640 Len:160 (Area:1)<br>Output Start:1640 Len:160 (Area:2)<br>Input Start:15000 Len:256 (Area:1)<br>Input Start:15032 Len:256 (Area:1)<br>Input Start:15064 Len:256 (Area:1)<br>Input Start:15096 Len:256 (Area:1)<br>Output Start:15000 Len:256 (Area:1)<br>Output Start:15032 Len:256 (Area:1)<br>Output Start:15064 Len:256 (Area:1)<br>Output Start:15096 Len:256 (Area:1) |
| Ungrouped devices | Server module | 6ES7 193-6PA00-0AA0 | N/A | Input Start:501 Len:8 (Area:1)<br>Output Start:16 Len:8 (Area:1)<br>Output Start:17 Len:8 (Area:1)<br>Output Start:18 Len:8 (Area:1)<br>Output Start:19 Len:8 (Area:1)<br>Output Start:20 Len:8 (Area:1)<br>Input Start:3080 Len:40 (Area:1)<br>Input Start:3100 Len:40 (Area:1)<br>Output Start:3100 Len:8 (Area:2)<br>Input Start:2030 Len:40 (Area:1)<br>Input Start:2045 Len:40 (Area:1)<br>Input Start:2050 Len:40 (Area:1)<br>Input Start:2055 Len:40 (Area:1)<br>Input Start:2070 Len:40 (Area:1)<br>Output Start:2070 Len:8 (Area:2)<br>Input Start:3200 Len:40 (Area:1)<br>Input Start:3215 Len:40 (Area:1)<br>Input Start:3225 Len:40 (Area:1)<br>Input Start:3240 Len:40 (Area:1)<br>Output Start:3240 Len:8 (Area:2)<br>Input Start:0 Len:8 (Area:1)<br>Input Start:1 Len:8 (Area:1)<br>Input Start:2 Len:8 (Area:1)<br>Input Start:3 Len:8 (Area:1)<br>Input Start:4 Len:8 (Area:1)<br>Input Start:5 Len:8 (Area:1)<br>Input Start:6 Len:8 (Area:1)<br>Input Start:7 Len:8 (Area:1)<br>Output Start:0 Len:8 (Area:1)<br>Output Start:1 Len:8 (Area:1)<br>Output Start:2 Len:8 (Area:1)<br>Output Start:3 Len:8 (Area:1)<br>Output Start:4 Len:8 (Area:1)<br>Output Start:5 Len:8 (Area:1)<br>Output Start:6 Len:8 (Area:1)<br>Output Start:7 Len:8 (Area:1)<br>Input Start:100 Len:32 (Area:1)<br>Input Start:104 Len:32 (Area:1)<br>Input Start:108 Len:32 (Area:1)<br>Input Start:112 Len:32 (Area:1)<br>Input Start:116 Len:32 (Area:1)<br>Output Start:100 Len:32 (Area:1)<br>Output Start:104 Len:32 (Area:1)<br>Output Start:108 Len:32 (Area:1)<br>Output Start:112 Len:32 (Area:1)<br>Output Start:116 Len:32 (Area:1)<br>Output Start:120 Len:32 (Area:1)<br>Input Start:1640 Len:160 (Area:1)<br>Output Start:1640 Len:160 (Area:2)<br>Input Start:15000 Len:256 (Area:1)<br>Input Start:15032 Len:256 (Area:1)<br>Input Start:15064 Len:256 (Area:1)<br>Input Start:15096 Len:256 (Area:1)<br>Output Start:15000 Len:256 (Area:1)<br>Output Start:15032 Len:256 (Area:1)<br>Output Start:15064 Len:256 (Area:1)<br>Output Start:15096 Len:256 (Area:1) |
| CPX-FB13: DP-Slave [Status]_1 | N/A | N/A | N/A | Input Start:501 Len:8 (Area:1) |
| MPA1S: VMPA1-FB-EMS-8 [8DO]_1 | N/A | N/A | N/A | Output Start:16 Len:8 (Area:1) |
| MPA1S: VMPA1-FB-EMS-8 [8DO]_2 | N/A | N/A | N/A | Output Start:17 Len:8 (Area:1) |
| MPA1S: VMPA1-FB-EMS-8 [8DO]_3 | N/A | N/A | N/A | Output Start:18 Len:8 (Area:1) |
| MPA1S: VMPA1-FB-EMS-8 [8DO]_4 | N/A | N/A | N/A | Output Start:19 Len:8 (Area:1) |
| MPA1S: VMPA1-FB-EMS-8 [8DO]_5 | N/A | N/A | N/A | Output Start:20 Len:8 (Area:1) |
| Volume Flow | N/A | N/A | N/A | Input Start:3080 Len:40 (Area:1) |
| Totalizer Value / Control | N/A | N/A | N/A | Input Start:3100 Len:40 (Area:1)<br>Output Start:3100 Len:8 (Area:2) |
| Mass Flow | N/A | N/A | N/A | Input Start:2030 Len:40 (Area:1) |
| Density | N/A | N/A | N/A | Input Start:2045 Len:40 (Area:1) |
| Concentration | N/A | N/A | N/A | Input Start:2050 Len:40 (Area:1) |
| Temperature | N/A | N/A | N/A | Input Start:2055 Len:40 (Area:1) |
| Totalizer Value / Control | N/A | N/A | N/A | Input Start:2070 Len:40 (Area:1)<br>Output Start:2070 Len:8 (Area:2) |
| Mass Flow | N/A | N/A | N/A | Input Start:3200 Len:40 (Area:1) |
| Density | N/A | N/A | N/A | Input Start:3215 Len:40 (Area:1) |
| Temperature | N/A | N/A | N/A | Input Start:3225 Len:40 (Area:1) |
| Totalizer Value / Control | N/A | N/A | N/A | Input Start:3240 Len:40 (Area:1)<br>Output Start:3240 Len:8 (Area:2) |
| 300A0 | N/A | N/A | N/A | Input Start:0 Len:8 (Area:1) |
| 301A0 | N/A | N/A | N/A | Input Start:1 Len:8 (Area:1) |
| 302A0 | N/A | N/A | N/A | Input Start:2 Len:8 (Area:1) |
| 303A0 | N/A | N/A | N/A | Input Start:3 Len:8 (Area:1) |
| 304A0 | N/A | N/A | N/A | Input Start:4 Len:8 (Area:1) |
| 305A0 | N/A | N/A | N/A | Input Start:5 Len:8 (Area:1) |
| 306A0 | N/A | N/A | N/A | Input Start:6 Len:8 (Area:1) |
| 307A0 | N/A | N/A | N/A | Input Start:7 Len:8 (Area:1) |
| 320A0 | N/A | N/A | N/A | Output Start:0 Len:8 (Area:1) |
| 321A0 | N/A | N/A | N/A | Output Start:1 Len:8 (Area:1) |
| 322A0 | N/A | N/A | N/A | Output Start:2 Len:8 (Area:1) |
| 323A0 | N/A | N/A | N/A | Output Start:3 Len:8 (Area:1) |
| 324A0 | N/A | N/A | N/A | Output Start:4 Len:8 (Area:1) |
| 325A0 | N/A | N/A | N/A | Output Start:5 Len:8 (Area:1) |
| 326A0 | N/A | N/A | N/A | Output Start:6 Len:8 (Area:1) |
| 327A0 | N/A | N/A | N/A | Output Start:7 Len:8 (Area:1) |
| 340A1 | N/A | N/A | N/A | Input Start:100 Len:32 (Area:1) |
| 341A1 | N/A | N/A | N/A | Input Start:104 Len:32 (Area:1) |
| 342A1 | N/A | N/A | N/A | Input Start:108 Len:32 (Area:1) |
| 343A1 | N/A | N/A | N/A | Input Start:112 Len:32 (Area:1) |
| 344A1 | N/A | N/A | N/A | Input Start:116 Len:32 (Area:1) |
| 350A1 | N/A | N/A | N/A | Output Start:100 Len:32 (Area:1) |
| 351A1 | N/A | N/A | N/A | Output Start:104 Len:32 (Area:1) |
| 352A1 | N/A | N/A | N/A | Output Start:108 Len:32 (Area:1) |
| 353A1 | N/A | N/A | N/A | Output Start:112 Len:32 (Area:1) |
| 354A1 | N/A | N/A | N/A | Output Start:116 Len:32 (Area:1) |
| 355A1 | N/A | N/A | N/A | Output Start:120 Len:32 (Area:1) |
| PPO Type 8 Module consistent PCD_1 | N/A | N/A | N/A | Input Start:1640 Len:160 (Area:1)<br>Output Start:1640 Len:160 (Area:2) |
| IN128 mPDS5>PLC_4_1 | N/A | N/A | N/A | Input Start:15000 Len:256 (Area:1) |
| IN128 mPDS5>PLC_4_2 | N/A | N/A | N/A | Input Start:15032 Len:256 (Area:1) |
| IN128 mPDS5>PLC_4_3 | N/A | N/A | N/A | Input Start:15064 Len:256 (Area:1) |
| IN128 mPDS5>PLC_4_4 | N/A | N/A | N/A | Input Start:15096 Len:256 (Area:1) |
| OUT128 PLC>mPDS5_4_1 | N/A | N/A | N/A | Output Start:15000 Len:256 (Area:1) |
| OUT128 PLC>mPDS5_4_2 | N/A | N/A | N/A | Output Start:15032 Len:256 (Area:1) |
| OUT128 PLC>mPDS5_4_3 | N/A | N/A | N/A | Output Start:15064 Len:256 (Area:1) |
| OUT128 PLC>mPDS5_4_4 | N/A | N/A | N/A | Output Start:15096 Len:256 (Area:1) |

## Connections / IO Tag Links
| Link Name | Source (Device::Channel/Interface) | Target (Device::Tag/Interface) |
|---|---|---|
| Link To Subnet_1 | `E1::LogicalEndPoint_Node` | `ETHERNET_1::LogicalEndPoint_Subnet` |
| Link To Subnet_2 | `P1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_3 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_4 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_5 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_6 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_7 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_8 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To Subnet_9 | `PB1::LogicalEndPoint_Node` | `PROFIBUS_1::LogicalEndPoint_Subnet` |
| Link To IoSystem_1 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_2 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_3 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_4 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_5 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_6 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To IoSystem_7 | `DP interface::LogicalEndPoint_Interface` | `DP-Mastersystem::LogicalEndPoint_IoSystem` |
| Link To Tag_1 | `300A0::Channel_DI_0` | `InputsMaster::DI_AuxVoltage_On` |
| Link To Tag_2 | `300A0::Channel_DI_1` | `InputsMaster::DI_PB_HornReset` |
| Link To Tag_3 | `300A0::Channel_DI_2` | `InputsMaster::DI_AlarmReset` |
| Link To Tag_4 | `300A0::Channel_DI_3` | `InputsMaster::DI_PB_Machine_Stop` |
| Link To Tag_5 | `300A0::Channel_DI_4` | `InputsMaster::DI_PB_Machine_Start` |
| Link To Tag_6 | `300A0::Channel_DI_5` | `Inputs::DI_Emergency_Pilz_On` |
| Link To Tag_7 | `300A0::Channel_DI_6` | `Inputs::DI_LSN301L` |
| Link To Tag_8 | `300A0::Channel_DI_7` | `InputsMaster::DI_Min_Syrup_Level` |
| Link To Tag_9 | `301A0::Channel_DI_0` | `Inputs::DI_LSM302L` |
| Link To Tag_10 | `301A0::Channel_DI_5` | `Inputs::DI_RMM301_Closed` |
| Link To Tag_11 | `301A0::Channel_DI_6` | `Inputs::DI_RMP302_Closed` |
| Link To Tag_12 | `301A0::Channel_DI_7` | `Inputs::DI_RMM303_Closed` |
| Link To Tag_13 | `302A0::Channel_DI_0` | `InputsMaster::DI_PPN301_Ovrld` |
| Link To Tag_14 | `302A0::Channel_DI_1` | `InputsMaster::DI_PPN301_SoftStOvr` |
| Link To Tag_15 | `302A0::Channel_DI_2` | `Inputs::DI_PPP302_Ovrld` |
| Link To Tag_16 | `302A0::Channel_DI_3` | `Inputs::DI_PPP302_Contactor` |
| Link To Tag_17 | `302A0::Channel_DI_4` | `Inputs::DI_PPM303_Ovrld` |
| Link To Tag_18 | `302A0::Channel_DI_5` | `InputsMaster::DI_PPM303_Contactor` |
| Link To Tag_19 | `303A0::Channel_DI_5` | `Inputs::DI_UPSAlarm` |
| Link To Tag_20 | `303A0::Channel_DI_6` | `Inputs::DI_UPSsupply` |
| Link To Tag_21 | `303A0::Channel_DI_7` | `Inputs::DI_UPSBatteryReady` |
| Link To Tag_22 | `304A0::Channel_DI_3` | `Inputs::DI_Emergency_Pressed` |
| Link To Tag_23 | `304A0::Channel_DI_4` | `InputsMaster::DI_Log_Sidel` |
| Link To Tag_24 | `305A0::Channel_DI_0` | `Inputs::DI_MaxTempAlarm` |
| Link To Tag_25 | `305A0::Channel_DI_0` | `Inputs::DI_SyrRoom_SyrPump_Running` |
| Link To Tag_26 | `307A0::Channel_DI_1` | `Inputs::DI_Air_InletPress_OK` |
| Link To Tag_27 | `307A0::Channel_DI_2` | `InputsMaster::DI_HVP301_Sensor` |
| Link To Tag_28 | `307A0::Channel_DI_3` | `InputsMaster::DI_FSS301` |
| Link To Tag_29 | `320A0::Channel_DO_0` | `Outputs::DO_HMIPowerSupply` |
| Link To Tag_30 | `321A0::Channel_DO_0` | `OutputsMaster::DO_SyRm_SyrupReques` |
| Link To Tag_31 | `321A0::Channel_DO_1` | `OutputsMaster::DO_SyrupRoomWaterReq` |
| Link To Tag_32 | `325A0::Channel_DO_0` | `Memories::AB5` |
| Link To Tag_33 | `325A0::Channel_DO_0` | `Memories::Tag_1` |
| Link To Tag_34 | `325A0::Channel_DO_1` | `Memories::AB5` |
| ... | ... | ... |

@@ -0,0 +1,258 @@
import tkinter as tk
from tkinter import filedialog
import os
import sys
import traceback
import xml.etree.ElementTree as ET  # Library to parse XML (AML)

# --- Configuration ---
TIA_PORTAL_VERSION = "18.0"  # Target TIA Portal version

# --- TIA Scripting Import Handling ---
# (Same import handling as the previous script)
if os.getenv('TIA_SCRIPTING'):
    sys.path.append(os.getenv('TIA_SCRIPTING'))
else:
    pass

try:
    import siemens_tia_scripting as ts
except ImportError:
    print("ERROR: Failed to import 'siemens_tia_scripting'.")
    print("Ensure TIA Openness, the module, and Python 3.12.X are set up.")
    sys.exit(1)
except Exception as e:
    print(f"An unexpected error occurred during import: {e}")
    traceback.print_exc()
    sys.exit(1)

# --- Functions ---

def select_project_file():
    """Opens a dialog to select a TIA Portal project file."""
    root = tk.Tk()
    root.withdraw()
    file_path = filedialog.askopenfilename(
        title="Select TIA Portal Project File",
        filetypes=[(f"TIA Portal V{TIA_PORTAL_VERSION} Projects", f"*.ap{TIA_PORTAL_VERSION.split('.')[0]}")]
    )
    root.destroy()
    if not file_path:
        print("No project file selected. Exiting.")
        sys.exit(0)
    return file_path

def select_output_directory():
    """Opens a dialog to select the output directory."""
    root = tk.Tk()
    root.withdraw()
    dir_path = filedialog.askdirectory(
        title="Select Output Directory for AML and MD files"
    )
    root.destroy()
    if not dir_path:
        print("No output directory selected. Exiting.")
        sys.exit(0)
    return dir_path

def find_elements(element, path):
    """Helper to find elements using namespaces commonly found in AML."""
    # AutomationML namespaces often vary slightly or might be default
    # This basic approach tries common prefixes or no prefix
    namespaces = {
        '': element.tag.split('}')[0][1:] if '}' in element.tag else '',  # Default namespace if present
        'caex': 'http://www.dke.de/CAEX',  # Common CAEX namespace
        # Add other potential namespaces if needed based on file inspection
    }
    # Try finding with common prefixes or the default namespace
    for prefix, uri in namespaces.items():
        # Construct path with namespace URI if prefix is defined
        namespaced_path = path
        if prefix:
            parts = path.split('/')
            namespaced_parts = [f"{{{uri}}}{part}" if part != '.' else part for part in parts]
            namespaced_path = '/'.join(namespaced_parts)

        # Try findall with the constructed path
        found = element.findall(namespaced_path)
        if found:
            return found  # Return first successful find

    # Fallback: try finding without explicit namespace (might work if default ns is used throughout)
    # This might require adjusting the path string itself depending on the XML structure
    try:
        # Simple attempt without namespace handling if the above fails
        return element.findall(path)
    except SyntaxError:  # Handle potential errors if path isn't valid without namespaces
        return []
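
# Illustrative aside (not used by the script): ElementTree accepts two namespace-aware
# spellings that the helper above effectively emulates, e.g.
#   element.findall(".//{http://www.dke.de/CAEX}InternalElement")
#   element.findall(".//caex:InternalElement", {"caex": "http://www.dke.de/CAEX"})
# find_elements() simply builds the first form for each known namespace and falls back
# to the plain, namespace-free path if nothing matches.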


def parse_aml_to_markdown(aml_file_path, md_file_path):
    """Parses the AML file and generates a Markdown summary."""
    print(f"Parsing AML file: {aml_file_path}")
    try:
        tree = ET.parse(aml_file_path)
        root = tree.getroot()

        markdown_lines = ["# Project CAx Data Summary (AutomationML)", ""]

        # Find InstanceHierarchy - usually contains the project structure
        # Note: Namespace handling in ElementTree can be tricky. Adjust '{...}' part if needed.
        # We will use a helper function 'find_elements' to try common patterns
        instance_hierarchies = find_elements(root, './/InstanceHierarchy')  # Common CAEX tag

        if not instance_hierarchies:
            markdown_lines.append("Could not find InstanceHierarchy in the AML file.")
            print("Warning: Could not find InstanceHierarchy element.")
        else:
            # Assuming the first InstanceHierarchy is the main one
            ih = instance_hierarchies[0]
            markdown_lines.append(f"## Instance Hierarchy: {ih.get('Name', 'N/A')}")
            markdown_lines.append("")

            # Look for InternalElements which represent devices/components
            internal_elements = find_elements(ih, './/InternalElement')  # Common CAEX tag

            if not internal_elements:
                markdown_lines.append("No devices (InternalElement) found in InstanceHierarchy.")
                print("Info: No InternalElement tags found under InstanceHierarchy.")
            else:
                markdown_lines.append(f"Found {len(internal_elements)} device(s)/component(s):")
                markdown_lines.append("")
                markdown_lines.append("| Name | SystemUnitClass | RefBaseSystemUnitPath | Attributes |")
                markdown_lines.append("|---|---|---|---|")

                for elem in internal_elements:
                    name = elem.get('Name', 'N/A')
                    ref_path = elem.get('RefBaseSystemUnitPath', 'N/A')  # Path to class definition

                    # Try to get the class name from the RefBaseSystemUnitPath or SystemUnitClassLib
                    su_class_path = find_elements(elem, './/SystemUnitClass')  # Check direct child first
                    su_class = su_class_path[0].get('Path', 'N/A') if su_class_path else ref_path.split('/')[-1]  # Fallback to last part of path

                    attributes_md = ""
                    attributes = find_elements(elem, './/Attribute')  # Find attributes
                    attr_list = []
                    for attr in attributes:
                        attr_name = attr.get('Name', '')
                        attr_value_elem = find_elements(attr, './/Value')  # Get Value element
                        attr_value = attr_value_elem[0].text if attr_value_elem and attr_value_elem[0].text else 'N/A'

                        # Look for potential IP addresses (common attribute names)
                        if "Address" in attr_name or "IP" in attr_name:
                            attr_list.append(f"**{attr_name}**: {attr_value}")
                        else:
                            attr_list.append(f"{attr_name}: {attr_value}")

                    attributes_md = "<br>".join(attr_list) if attr_list else "None"

                    markdown_lines.append(f"| {name} | {su_class} | `{ref_path}` | {attributes_md} |")

        # Write to Markdown file
        with open(md_file_path, 'w', encoding='utf-8') as f:
            f.write("\n".join(markdown_lines))
        print(f"Markdown summary written to: {md_file_path}")

    except ET.ParseError as xml_err:
        print(f"ERROR parsing XML file {aml_file_path}: {xml_err}")
        with open(md_file_path, 'w', encoding='utf-8') as f:
            f.write(f"# Error\n\nFailed to parse AML file: {os.path.basename(aml_file_path)}\n\nError: {xml_err}")
    except Exception as e:
        print(f"ERROR processing AML file {aml_file_path}: {e}")
        traceback.print_exc()
        with open(md_file_path, 'w', encoding='utf-8') as f:
            f.write(f"# Error\n\nAn unexpected error occurred while processing AML file: {os.path.basename(aml_file_path)}\n\nError: {e}")


# --- Main Script ---

if __name__ == "__main__":
    print("--- TIA Portal Project CAx Exporter and Analyzer ---")

    # 1. Select Files/Folders
    project_file = select_project_file()
    output_dir = select_output_directory()

    print(f"\nSelected Project: {project_file}")
    print(f"Selected Output Directory: {output_dir}")

    # Define output file names
    project_base_name = os.path.splitext(os.path.basename(project_file))[0]
    aml_file = os.path.join(output_dir, f"{project_base_name}_CAx_Export.aml")
    md_file = os.path.join(output_dir, f"{project_base_name}_CAx_Summary.md")
    log_file = os.path.join(output_dir, f"{project_base_name}_CAx_Export.log")  # Log file for the export process

    print(f"Will export CAx data to: {aml_file}")
    print(f"Will generate summary to: {md_file}")
    print(f"Export log file: {log_file}")

    portal_instance = None
    project_object = None
    cax_export_successful = False

    try:
        # 2. Connect to TIA Portal
        print(f"\nConnecting to TIA Portal V{TIA_PORTAL_VERSION}...")
        portal_instance = ts.open_portal(
            version=TIA_PORTAL_VERSION,
            portal_mode=ts.Enums.PortalMode.WithGraphicalUserInterface
        )
        print("Connected.")

        # 3. Open Project
        print(f"Opening project: {os.path.basename(project_file)}...")
        project_object = portal_instance.open_project(project_file_path=project_file)
        if project_object is None:
            project_object = portal_instance.get_project()
            if project_object is None:
                raise Exception("Failed to open or get the specified project.")
        print("Project opened.")

        # 4. Export CAx Data (Project Level)
        print(f"Exporting CAx data for the project to {aml_file}...")
        # Ensure output directory exists for the log file as well
        os.makedirs(os.path.dirname(log_file), exist_ok=True)

        export_result = project_object.export_cax_data(export_file_path=aml_file, log_file_path=log_file)

        if export_result:
            print("CAx data exported successfully.")
            cax_export_successful = True
        else:
            print("CAx data export failed. Check the log file for details:")
            print(f" Log file: {log_file}")
            # Write basic error message to MD file if export fails
            with open(md_file, 'w', encoding='utf-8') as f:
                f.write(f"# Error\n\nCAx data export failed. Check log file: {log_file}")

    except ts.TiaException as tia_ex:
        print(f"\nTIA Portal Openness Error: {tia_ex}")
        traceback.print_exc()
    except FileNotFoundError:
        print(f"\nERROR: Project file not found at {project_file}")
    except Exception as e:
        print(f"\nAn unexpected error occurred during TIA interaction: {e}")
        traceback.print_exc()
    finally:
        # Close TIA Portal before processing the file (or detach)
        if portal_instance:
            try:
                print("\nClosing TIA Portal...")
                portal_instance.close_portal()
                print("TIA Portal closed.")
            except Exception as close_ex:
                print(f"Error during TIA Portal cleanup: {close_ex}")

    # 5. Parse AML and Generate Markdown (only if export was successful)
    if cax_export_successful:
        if os.path.exists(aml_file):
            parse_aml_to_markdown(aml_file, md_file)
        else:
            print(f"ERROR: Export was reported successful, but AML file not found at {aml_file}")
            with open(md_file, 'w', encoding='utf-8') as f:
                f.write(f"# Error\n\nExport was reported successful, but AML file not found:\n{aml_file}")

    print("\nScript finished.")

@@ -0,0 +1,265 @@
import tkinter as tk
from tkinter import filedialog
import os
import sys
import traceback

# --- Configuration ---
TIA_PORTAL_VERSION = "18.0"  # Target TIA Portal version (e.g., "18.0")
EXPORT_OPTIONS = None  # Use default export options
KEEP_FOLDER_STRUCTURE = True  # Replicate TIA project folder structure in export directory

# --- TIA Scripting Import Handling ---
# Check if the TIA_SCRIPTING environment variable is set
if os.getenv('TIA_SCRIPTING'):
    sys.path.append(os.getenv('TIA_SCRIPTING'))
else:
    # Optional: Define a fallback path if the environment variable isn't set
    # fallback_path = "C:\\path\\to\\your\\TIA_Scripting_binaries"
    # if os.path.exists(fallback_path):
    #     sys.path.append(fallback_path)
    pass  # Allow import to fail if not found

try:
    import siemens_tia_scripting as ts
    EXPORT_OPTIONS = ts.Enums.ExportOptions.WithDefaults  # Set default options now that 'ts' is imported
except ImportError:
    print("ERROR: Failed to import 'siemens_tia_scripting'.")
    print("Ensure:")
    print(f"1. TIA Portal Openness for V{TIA_PORTAL_VERSION} is installed.")
    print("2. The 'siemens_tia_scripting' Python module is installed (pip install ...) or")
    print("   the path to its binaries is set in the 'TIA_SCRIPTING' environment variable.")
    print("3. You are using a compatible Python version (e.g., 3.12.X as per documentation).")
    sys.exit(1)
except Exception as e:
    print(f"An unexpected error occurred during import: {e}")
    traceback.print_exc()
    sys.exit(1)
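
# Example (hypothetical path, not part of the script's flow): before launching, you would
# point TIA_SCRIPTING at the folder holding the Openness scripting binaries, e.g. on Windows
#   set TIA_SCRIPTING=C:\SiemensScripting\bin
# so that the sys.path.append() above makes 'siemens_tia_scripting' importable.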


# --- Functions ---

def select_project_file():
    """Opens a dialog to select a TIA Portal project file."""
    root = tk.Tk()
    root.withdraw()  # Hide the main tkinter window
    file_path = filedialog.askopenfilename(
        title="Select TIA Portal Project File",
        filetypes=[(f"TIA Portal V{TIA_PORTAL_VERSION} Projects", f"*.ap{TIA_PORTAL_VERSION.split('.')[0]}")]  # e.g. *.ap18
    )
    root.destroy()
    if not file_path:
        print("No project file selected. Exiting.")
        sys.exit(0)
    return file_path

def select_export_directory():
    """Opens a dialog to select the export directory."""
    root = tk.Tk()
    root.withdraw()  # Hide the main tkinter window
    dir_path = filedialog.askdirectory(
        title="Select Export Directory"
    )
    root.destroy()
    if not dir_path:
        print("No export directory selected. Exiting.")
        sys.exit(0)
    return dir_path

def export_plc_data(plc, export_base_dir):
    """Exports Blocks, UDTs, and Tag Tables from a given PLC."""
    plc_name = plc.get_name()
    print(f"\n--- Processing PLC: {plc_name} ---")

    # Define base export path for this PLC
    plc_export_dir = os.path.join(export_base_dir, plc_name)
    os.makedirs(plc_export_dir, exist_ok=True)

    # --- Export Program Blocks ---
    blocks_exported = 0
    blocks_skipped = 0
    print(f"\n[PLC: {plc_name}] Exporting Program Blocks...")
    xml_blocks_path = os.path.join(plc_export_dir, "ProgramBlocks_XML")
    scl_blocks_path = os.path.join(plc_export_dir, "ProgramBlocks_SCL")
    os.makedirs(xml_blocks_path, exist_ok=True)
    os.makedirs(scl_blocks_path, exist_ok=True)
    print(f" XML Target: {xml_blocks_path}")
    print(f" SCL Target: {scl_blocks_path}")

    try:
        program_blocks = plc.get_program_blocks()
        print(f" Found {len(program_blocks)} program blocks.")
        for block in program_blocks:
            block_name = block.get_name()  # Assuming get_name() exists
            print(f" Processing block: {block_name}...")
            try:
                if not block.is_consistent():
                    print(f" Compiling block {block_name}...")
                    block.compile()
                    if not block.is_consistent():
                        print(f" WARNING: Block {block_name} inconsistent after compile. Skipping.")
                        blocks_skipped += 1
                        continue

                print(f" Exporting {block_name} as XML...")
                block.export(target_directory_path=xml_blocks_path,
                             export_options=EXPORT_OPTIONS,
                             export_format=ts.Enums.ExportFormats.SimaticML,
                             keep_folder_structure=KEEP_FOLDER_STRUCTURE)

                try:
                    prog_language = block.get_property(name="ProgrammingLanguage")
                    if prog_language == "SCL":
                        print(f" Exporting {block_name} as SCL...")
                        block.export(target_directory_path=scl_blocks_path,
                                     export_options=EXPORT_OPTIONS,
                                     export_format=ts.Enums.ExportFormats.ExternalSource,
                                     keep_folder_structure=KEEP_FOLDER_STRUCTURE)
                except Exception as prop_ex:
                    print(f" Could not get ProgrammingLanguage for {block_name}. Skipping SCL. Error: {prop_ex}")

                blocks_exported += 1
            except Exception as block_ex:
                print(f" ERROR exporting block {block_name}: {block_ex}")
                blocks_skipped += 1
        print(f" Program Blocks Export Summary: Exported={blocks_exported}, Skipped/Errors={blocks_skipped}")
    except Exception as e:
        print(f" ERROR processing Program Blocks: {e}")
        traceback.print_exc()
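
    # Summary of the flow above: a block is exported only while TIA reports it consistent;
    # an inconsistent block is compiled once and re-checked, and skipped if it still fails,
    # so Skipped/Errors counts both compile failures and export exceptions. SCL sources are
    # written in addition to (not instead of) the SimaticML XML export.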

    # --- Export PLC Data Types (UDTs) ---
    udts_exported = 0
    udts_skipped = 0
    print(f"\n[PLC: {plc_name}] Exporting PLC Data Types (UDTs)...")
    udt_export_path = os.path.join(plc_export_dir, "PlcDataTypes")
    os.makedirs(udt_export_path, exist_ok=True)
    print(f" Target: {udt_export_path}")

    try:
        udts = plc.get_user_data_types()
        print(f" Found {len(udts)} UDTs.")
        for udt in udts:
            udt_name = udt.get_name()
            print(f" Processing UDT: {udt_name}...")
            try:
                if not udt.is_consistent():
                    print(f" Compiling UDT {udt_name}...")
                    udt.compile()
                    if not udt.is_consistent():
                        print(f" WARNING: UDT {udt_name} inconsistent after compile. Skipping.")
                        udts_skipped += 1
                        continue

                print(f" Exporting {udt_name}...")
                udt.export(target_directory_path=udt_export_path,
                           export_options=EXPORT_OPTIONS,
                           # export_format defaults to SimaticML for UDTs
                           keep_folder_structure=KEEP_FOLDER_STRUCTURE)
                udts_exported += 1
            except Exception as udt_ex:
                print(f" ERROR exporting UDT {udt_name}: {udt_ex}")
                udts_skipped += 1
        print(f" UDT Export Summary: Exported={udts_exported}, Skipped/Errors={udts_skipped}")
    except Exception as e:
        print(f" ERROR processing UDTs: {e}")
        traceback.print_exc()

    # --- Export PLC Tag Tables ---
    tags_exported = 0
    tags_skipped = 0
    print(f"\n[PLC: {plc_name}] Exporting PLC Tag Tables...")
    tags_export_path = os.path.join(plc_export_dir, "PlcTags")
    os.makedirs(tags_export_path, exist_ok=True)
    print(f" Target: {tags_export_path}")

    try:
        tag_tables = plc.get_plc_tag_tables()
        print(f" Found {len(tag_tables)} Tag Tables.")
        for table in tag_tables:
            table_name = table.get_name()
            print(f" Processing Tag Table: {table_name}...")
            try:
                # Note: Consistency check might not be available/needed for tag tables like blocks/UDTs
                print(f" Exporting {table_name}...")
                table.export(target_directory_path=tags_export_path,
                             export_options=EXPORT_OPTIONS,
                             # export_format defaults to SimaticML for Tag Tables
                             keep_folder_structure=KEEP_FOLDER_STRUCTURE)
                tags_exported += 1
            except Exception as table_ex:
                print(f" ERROR exporting Tag Table {table_name}: {table_ex}")
                tags_skipped += 1
        print(f" Tag Table Export Summary: Exported={tags_exported}, Skipped/Errors={tags_skipped}")
    except Exception as e:
        print(f" ERROR processing Tag Tables: {e}")
        traceback.print_exc()

    print(f"\n--- Finished processing PLC: {plc_name} ---")


# --- Main Script ---

if __name__ == "__main__":
    print("--- TIA Portal Data Exporter (Blocks, UDTs, Tags) ---")

    # 1. Select Files/Folders
    project_file = select_project_file()
    export_dir = select_export_directory()

    print(f"\nSelected Project: {project_file}")
    print(f"Selected Export Directory: {export_dir}")

    portal_instance = None
    project_object = None

    try:
        # 2. Connect to TIA Portal
        print(f"\nConnecting to TIA Portal V{TIA_PORTAL_VERSION}...")
        portal_instance = ts.open_portal(
            version=TIA_PORTAL_VERSION,
            portal_mode=ts.Enums.PortalMode.WithGraphicalUserInterface
        )
        print("Connected to TIA Portal.")
        print(f"Portal Process ID: {portal_instance.get_process_id()}")

        # 3. Open Project
        print(f"Opening project: {os.path.basename(project_file)}...")
        project_object = portal_instance.open_project(project_file_path=project_file)
        if project_object is None:
            print("Project might already be open, attempting to get handle...")
            project_object = portal_instance.get_project()
            if project_object is None:
                raise Exception("Failed to open or get the specified project.")
        print("Project opened successfully.")

        # 4. Get PLCs
        plcs = project_object.get_plcs()
        if not plcs:
            print("No PLC devices found in the project.")
        else:
            print(f"Found {len(plcs)} PLC(s). Starting export process...")

            # 5. Iterate and Export Data for each PLC
            for plc_device in plcs:
                export_plc_data(plc=plc_device, export_base_dir=export_dir)

        print("\nExport process completed.")

    except ts.TiaException as tia_ex:
        print(f"\nTIA Portal Openness Error: {tia_ex}")
        traceback.print_exc()
    except FileNotFoundError:
        print(f"\nERROR: Project file not found at {project_file}")
    except Exception as e:
        print(f"\nAn unexpected error occurred: {e}")
        traceback.print_exc()
    finally:
        # 6. Cleanup
        if portal_instance:
            try:
                print("\nClosing TIA Portal...")
                portal_instance.close_portal()
                print("TIA Portal closed.")
            except Exception as close_ex:
                print(f"Error during TIA Portal cleanup: {close_ex}")

    print("\nScript finished.")

@@ -0,0 +1,335 @@
import os
import sys
import traceback

# Import lxml
from lxml import etree as ET
import json
from pathlib import Path
import re

# --- Configuration ---
# (No changes needed here)


# CORRECTED function from v6
def extract_aml_data_v7(root):
    """Extracts device information using lxml and local-name() XPath. (Corrected)"""
    project_data = {"devices": {}, "connections": []}  # Use dict for devices

    instance_hierarchies = root.xpath(".//*[local-name()='InstanceHierarchy']")
    if not instance_hierarchies:
        print("ERROR: Could not find 'InstanceHierarchy'.")
        return project_data
    ih = instance_hierarchies[0]
    print(f"Processing InstanceHierarchy: {ih.get('Name', 'N/A')}")

    internal_elements = ih.xpath(".//*[local-name()='InternalElement']")
    print(f"Found {len(internal_elements)} InternalElement(s). Analyzing...")

    # --- Device Loop ---
    for elem in internal_elements:
        elem_id = elem.get("ID", None)
        if not elem_id:
            continue

        device_info = {
            "name": elem.get("Name", "N/A"),
            "id": elem_id,
            "class": "N/A",
            "type_identifier": "N/A",
            "order_number": "N/A",
            "type_name": "N/A",
            "firmware_version": "N/A",  # Added firmware field
            "position": elem.get("PositionNumber", "N/A"),
            "attributes": {},
            "interfaces": [],
            "network_nodes": [],
            "io_addresses": [],
        }

        # Get Device Class/Type
        class_tag = elem.xpath("./*[local-name()='SystemUnitClass']")
        if class_tag:
            device_info["class"] = class_tag[0].get(
                "Path", elem.get("RefBaseSystemUnitPath", "N/A")
            )
        else:
            device_info["class"] = elem.get("RefBaseSystemUnitPath", "N/A")

        # Extract Attributes
        attributes = elem.xpath("./*[local-name()='Attribute']")  # Direct attributes first
        if not attributes:
            attributes = elem.xpath(".//*[local-name()='Attribute']")  # Fallback nested

        for attr in attributes:
            attr_name = attr.get("Name", "")
            # Get text value of the direct child 'Value' tag
            value_elem = attr.xpath("./*[local-name()='Value']/text()")
            attr_value = value_elem[0] if value_elem else ""

            # Store common identifying attributes directly
            if attr_name == "TypeIdentifier":
                device_info["type_identifier"] = attr_value
                if "OrderNumber:" in attr_value:
                    device_info["order_number"] = attr_value.split("OrderNumber:")[-1]
            elif attr_name == "TypeName":
                device_info["type_name"] = attr_value
            elif attr_name == "FirmwareVersion":
                device_info["firmware_version"] = attr_value

            # Store all attributes for reference
            device_info["attributes"][attr_name] = attr_value

            # Extract Detailed IO Addresses
            if attr_name == "Address":
                address_parts = attr.xpath("./*[local-name()='Attribute']")
                for part in address_parts:
                    addr_details = {
                        "area": part.get("Name", "?"),
                        "start": "N/A",
                        "length": "N/A",
                        "type": "N/A",
                    }
                    start_val = part.xpath(
                        "./*[local-name()='Attribute'][@Name='StartAddress']/*[local-name()='Value']/text()"
                    )
                    len_val = part.xpath(
                        "./*[local-name()='Attribute'][@Name='Length']/*[local-name()='Value']/text()"
                    )
                    type_val = part.xpath(
                        "./*[local-name()='Attribute'][@Name='IoType']/*[local-name()='Value']/text()"
                    )
                    if start_val:
                        addr_details["start"] = start_val[0]
                    if len_val:
                        addr_details["length"] = len_val[0]
                    if type_val:
                        addr_details["type"] = type_val[0]
                    if addr_details["start"] != "N/A":
                        device_info["io_addresses"].append(addr_details)

        # Extract External Interfaces
        interfaces = elem.xpath("./*[local-name()='ExternalInterface']")
        for interface in interfaces:
            interface_info = {
                "name": interface.get("Name", "N/A"),
                "id": interface.get("ID", "N/A"),
                "ref_base_class": interface.get("RefBaseClassPath", "N/A"),
            }
            device_info["interfaces"].append(interface_info)

        # Extract Network Nodes
        network_nodes = elem.xpath(
            ".//*[local-name()='InternalElement']/*[local-name()='SupportedRoleClass'][contains(@RefRoleClassPath, 'Node')]"
        )
        for node_role in network_nodes:
            node_elem = node_role.getparent()
            node_info = {
                "name": node_elem.get("Name", "N/A"),
                "type": "N/A",
                "address": "N/A",
            }
            type_attr = node_elem.xpath(
                "./*[local-name()='Attribute'][@Name='Type']/*[local-name()='Value']/text()"
            )
            addr_attr = node_elem.xpath(
                "./*[local-name()='Attribute'][@Name='NetworkAddress']/*[local-name()='Value']/text()"
            )
            if type_attr:
                node_info["type"] = type_attr[0]
            if addr_attr:
                node_info["address"] = addr_attr[0]
            if node_info["address"] != "N/A":
                device_info["network_nodes"].append(node_info)

        project_data["devices"][elem_id] = device_info

    # Find and process InternalLinks
    internal_links = root.xpath(".//*[local-name()='InternalLink']")
    print(f"Found {len(internal_links)} InternalLink(s) globally.")
    for link in internal_links:
        side_a_match = re.match(r"([^:]+):?(.*)", link.get("RefPartnerSideA", ""))
        side_b_match = re.match(r"([^:]+):?(.*)", link.get("RefPartnerSideB", ""))
        side_a_id = side_a_match.group(1) if side_a_match else "N/A"
        side_a_suffix = (
            side_a_match.group(2)
            if side_a_match and side_a_match.group(2)
            else side_a_id
        )
        side_b_id = side_b_match.group(1) if side_b_match else "N/A"
        side_b_suffix = (
            side_b_match.group(2)
            if side_b_match and side_b_match.group(2)
            else side_b_id
        )

        link_info = {
            "name": link.get("Name", "N/A"),
            "side_a_id": side_a_id,
            "side_a_ref_suffix": side_a_suffix,
            "side_b_id": side_b_id,
            "side_b_ref_suffix": side_b_suffix,
            "side_a_device_name": project_data["devices"]
            .get(side_a_id, {})
            .get("name", side_a_id),
            "side_b_device_name": project_data["devices"]
            .get(side_b_id, {})
            .get("name", side_b_id),
        }
        project_data["connections"].append(link_info)

    return project_data
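
# Notes on the extraction above (illustrative, not executed):
# - The local-name() idiom keeps the XPath namespace-agnostic, e.g.
#     root.xpath(".//*[local-name()='InternalElement']")
#   matches CAEX InternalElement nodes without registering a namespace prefix.
# - RefPartnerSideA/B values have the shape "<elementID>:<suffix>"; the regex
#   r"([^:]+):?(.*)" splits a hypothetical "1234-abcd:Channel_DI_0" into the
#   device ID "1234-abcd" and the channel suffix "Channel_DI_0", and falls back
#   to the ID itself when no suffix is present.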


# --- generate_markdown_obsidian function remains the same as in v6 ---
def generate_markdown_obsidian(project_data, md_file_path):
    """Generates structured Markdown output for Obsidian."""

    def generate_table(headers, rows):
        lines = []
        if not rows:
            return ["No data available."]
        lines.append("| " + " | ".join(headers) + " |")
        lines.append("|" + "---|" * len(headers))
        for row in rows:
            lines.append("| " + " | ".join(map(str, row)) + " |")
        return lines

    markdown_lines = ["# Project Hardware & IO Summary (from CAx Export)", ""]
    network_rows = []
    for device_id, device in project_data["devices"].items():
        for node in device.get("network_nodes", []):
            network_rows.append(
                [
                    device.get("name", "N/A"),
                    node.get("name", "N/A"),
                    node.get("type", "N/A"),
                    node.get("address", "N/A"),
                ]
            )
    markdown_lines.append("## Network Configuration")
    markdown_lines.extend(
        generate_table(
            ["Parent Device", "Interface/Node Name", "Type", "Address (IP/DP)"],
            network_rows,
        )
    )
    markdown_lines.append("")

    io_module_rows = []
    for device_id, device in project_data["devices"].items():
        if device.get("io_addresses"):
            address_strs = [
                f"{addr['type']} Start:{addr['start']} Len:{addr['length']} (Area:{addr['area']})"
                for addr in device["io_addresses"]
            ]
            io_module_rows.append(
                [
                    device.get("name", "N/A"),
                    device.get("type_name", "N/A"),
                    device.get("order_number", "N/A"),
                    device.get("position", "N/A"),
                    "<br>".join(address_strs),
                ]
            )
    markdown_lines.append("## I/O Modules & Addresses")
    markdown_lines.extend(
        generate_table(
            [
                "Module Name",
                "Type Name",
                "Order Number",
                "Slot/Pos",
                "Logical Addresses",
            ],
            io_module_rows,
        )
    )
    markdown_lines.append("")

    connection_rows = []
    for i, conn in enumerate(project_data.get("connections", [])):
        if i >= 50:  # Limit links shown in MD
            connection_rows.append(["...", "...", "..."])
            break
        source = f"{conn.get('side_a_device_name', 'UNKNOWN')}::{conn.get('side_a_ref_suffix', 'N/A')}"
        target = f"{conn.get('side_b_device_name', 'UNKNOWN')}::{conn.get('side_b_ref_suffix', 'N/A')}"
        connection_rows.append([conn.get("name", "N/A"), f"`{source}`", f"`{target}`"])
    markdown_lines.append("## Connections / IO Tag Links")
    markdown_lines.extend(
        generate_table(
            [
                "Link Name",
                "Source (Device::Channel/Interface)",
                "Target (Device::Tag/Interface)",
            ],
            connection_rows,
        )
    )
    markdown_lines.append("")

    try:
        with open(md_file_path, "w", encoding="utf-8") as f:
            f.write("\n".join(markdown_lines))
        print(f"Markdown summary written to: {md_file_path}")
    except Exception as e:
        print(f"ERROR writing Markdown file {md_file_path}: {e}")


# --- process_aml_file_v7 function calls the corrected extraction ---
def process_aml_file_v7(aml_file_path, json_output_path, md_output_path):
    """Main function using lxml with local-name() and corrected extraction."""
    print(f"Processing AML file: {aml_file_path}")
    if not os.path.exists(aml_file_path):
        print(f"ERROR: Input AML file not found at {aml_file_path}")
        return

    try:
        parser = ET.XMLParser(remove_blank_text=True)
        tree = ET.parse(aml_file_path, parser)
        root = tree.getroot()
        project_data = extract_aml_data_v7(root)  # Call corrected function

        print(f"Generating JSON output: {json_output_path}")
        try:
            with open(json_output_path, "w", encoding="utf-8") as f:
                json.dump(project_data, f, indent=4, default=str)
            print("JSON data written successfully.")
        except Exception as e:
            print(f"ERROR writing JSON file {json_output_path}: {e}")

        generate_markdown_obsidian(project_data, md_output_path)  # Use the same MD generator

    except ET.LxmlError as xml_err:
        print(f"ERROR parsing XML file {aml_file_path} with lxml: {xml_err}")
    except Exception as e:
        print(f"ERROR processing AML file {aml_file_path}: {e}")
        traceback.print_exc()


# --- Main Execution ---
if __name__ == "__main__":
    print("--- AML (CAx Export) to JSON and Obsidian MD Converter (v7 - Corrected) ---")
    input_aml_file = "SAE196_c0.2.XML_CAx_Export.xml"
    input_path = Path(input_aml_file)
    if not input_path.is_file():
        print(f"ERROR: Input file '{input_aml_file}' not found.")
        sys.exit(1)
    output_json_file = input_path.with_suffix(".detailed.json")
    output_md_file = input_path.with_name(f"{input_path.stem}_Obsidian_Summary.md")
    print(f"Input AML: {input_path}")
    print(f"Output JSON: {output_json_file}")
    print(f"Output Markdown: {output_md_file}")
    process_aml_file_v7(str(input_path), str(output_json_file), str(output_md_file))  # Call v7
    print("\nScript finished.")