from flask import (
    Flask,
    request,
    jsonify,
    send_from_directory,
    Response,
)
from flask_cors import CORS
import json
import time
from datetime import datetime
import os
import sys

try:
    import tkinter as tk
    from tkinter import filedialog

    TKINTER_AVAILABLE = True
except ImportError:
    TKINTER_AVAILABLE = False
    print("Warning: tkinter not available. File browse functionality will be limited.")

from core import PLCDataStreamer
from utils.json_manager import JSONManager, SchemaManager
from utils.symbol_loader import SymbolLoader
from utils.symbol_processor import SymbolProcessor

app = Flask(__name__)
CORS(
    app,
    resources={
        r"/api/*": {
            "origins": [
                # NOTE: CORS origins are scheme+host+port; path suffixes like
                # "/app" never match, so they were dropped here. The wildcard
                # below currently makes the explicit entries redundant.
                "http://localhost:5173",
                "http://127.0.0.1:5173",
                "*",
            ]
        }
    },
)
app.secret_key = "plc_streamer_secret_key"

# React build directory (for Vite production build)
REACT_DIST_DIR = os.path.join(os.path.abspath("."), "frontend", "dist")


def resource_path(relative_path):
    """Get absolute path to resource, works for dev and for PyInstaller"""
    try:
        # PyInstaller creates a temp folder and stores path in _MEIPASS
        base_path = sys._MEIPASS
    except Exception:
        # Not running in a bundle
        base_path = os.path.abspath(".")

    return os.path.join(base_path, relative_path)


def project_path(*parts: str) -> str:
    """Build absolute path from the project root (based on this file)."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    return os.path.join(base_dir, *parts)


# Global instances
streamer = None
json_manager = JSONManager()
schema_manager = SchemaManager()


def check_streamer_initialized():
    """Check if streamer is initialized, return error response if not"""
    if streamer is None:
        return jsonify({"error": "Application not initialized"}), 503
    return None


@app.route("/images/<filename>")
|
|
def serve_image(filename):
|
|
"""Serve images from .images directory"""
|
|
return send_from_directory(".images", filename)
|
|
|
|
|
|
@app.route("/favicon.ico")
|
|
def serve_favicon():
|
|
"""Serve application favicon from React public folder.
|
|
|
|
Priority:
|
|
1) frontend/public/favicon.ico
|
|
2) frontend/public/record.png
|
|
"""
|
|
# Use absolute paths for reliability (works in dev and bundled)
|
|
public_dir = project_path("frontend", "public")
|
|
public_favicon = os.path.join(public_dir, "favicon.ico")
|
|
public_record = os.path.join(public_dir, "record.png")
|
|
|
|
if os.path.exists(public_favicon):
|
|
return send_from_directory(public_dir, "favicon.ico")
|
|
if os.path.exists(public_record):
|
|
return send_from_directory(public_dir, "record.png")
|
|
|
|
# Final fallback: 404
|
|
return Response("Favicon not found", status=404, mimetype="text/plain")
|
|
|
|
|
|
@app.route("/record.png")
|
|
def serve_public_record_png():
|
|
"""Serve /record.png from the React public folder."""
|
|
public_dir = project_path("frontend", "public")
|
|
public_record = os.path.join(public_dir, "record.png")
|
|
|
|
if os.path.exists(public_record):
|
|
return send_from_directory(public_dir, "record.png")
|
|
|
|
return Response("record.png not found", status=404, mimetype="text/plain")
|
|
|
|
|
|
# ==============================
# Frontend (React SPA)
# ==============================


@app.route("/assets/<path:filename>")
def serve_react_assets(filename):
    """Serve built React assets from Vite (production)."""
    assets_dir = os.path.join(REACT_DIST_DIR, "assets")
    return send_from_directory(assets_dir, filename)


@app.route("/")
@app.route("/app")
@app.route("/app/<path:path>")
def serve_react_index(path: str = ""):
    """Serve React SPA index (expects Vite build at frontend/dist)."""
    index_path = os.path.join(REACT_DIST_DIR, "index.html")
    if not os.path.exists(index_path):
        return Response(
            "React build not found. Run 'cd frontend && npm install && npm run build' first.",
            status=500,
            mimetype="text/plain",
        )
    return send_from_directory(REACT_DIST_DIR, "index.html")


## Legacy UI removed after migration to React


# ==============================
# Unified JSON Configuration API
# ==============================


@app.route("/api/config/schemas", methods=["GET"])
|
|
def list_config_schemas():
|
|
"""List all available configuration schemas."""
|
|
try:
|
|
schemas = schema_manager.list_available_schemas()
|
|
return jsonify({"success": True, "schemas": schemas})
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/config/schema/<schema_id>", methods=["GET"])
|
|
def get_config_schema(schema_id):
|
|
"""Get a specific JSON schema with optional UI schema."""
|
|
try:
|
|
# Get main schema
|
|
schema = schema_manager.get_schema(schema_id)
|
|
if not schema:
|
|
return (
|
|
jsonify({"success": False, "error": f"Schema '{schema_id}' not found"}),
|
|
404,
|
|
)
|
|
|
|
# Get optional UI schema
|
|
ui_schema = schema_manager.get_ui_schema(schema_id)
|
|
|
|
response = {"success": True, "schema": schema}
|
|
if ui_schema:
|
|
response["ui_schema"] = ui_schema
|
|
|
|
return jsonify(response)
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/config/<config_id>", methods=["GET"])
|
|
def read_config(config_id):
|
|
"""Read configuration data from JSON file."""
|
|
try:
|
|
data = json_manager.read_json(config_id)
|
|
return jsonify({"success": True, "data": data})
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "error": str(e)}), 400
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/config/<config_id>", methods=["PUT"])
|
|
def write_config(config_id):
|
|
"""Write configuration data to JSON file."""
|
|
try:
|
|
payload = request.get_json(force=True, silent=False)
|
|
if not payload:
|
|
return jsonify({"success": False, "error": "No JSON data provided"}), 400
|
|
|
|
# Write the data
|
|
json_manager.write_json(config_id, payload)
|
|
|
|
# Notify backend to reload if it's PLC config
|
|
if config_id == "plc" and streamer:
|
|
try:
|
|
streamer.config_manager.load_configuration()
|
|
except Exception as e:
|
|
# Log the error but don't fail the save operation
|
|
print(f"Warning: Could not reload config in backend: {e}")
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Configuration '{config_id}' saved successfully",
|
|
}
|
|
)
|
|
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "error": str(e)}), 400
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
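# A minimal sketch of the config read/write round-trip from a client. The
# host and port are assumptions (Flask's default dev address), and the
# payload shape is illustrative; the endpoint stores whatever JSON object
# it receives under the given config_id ("plc" is the id handled above):
#
#   curl http://localhost:5000/api/config/plc
#   curl -X PUT http://localhost:5000/api/config/plc \
#        -H "Content-Type: application/json" \
#        -d '{"ip": "10.1.33.11", "rack": 0, "slot": 2}'

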
@app.route("/api/config/<config_id>/export", methods=["GET"])
|
|
def export_config(config_id):
|
|
"""Export configuration as downloadable JSON file."""
|
|
try:
|
|
data = json_manager.read_json(config_id)
|
|
|
|
# Prepare download response
|
|
content = json.dumps(data, indent=2, ensure_ascii=False)
|
|
filename = f"{config_id}_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
|
|
|
|
response = Response(content, mimetype="application/json")
|
|
response.headers["Content-Disposition"] = f"attachment; filename={filename}"
|
|
return response
|
|
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "error": str(e)}), 400
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/config/<config_id>/reload", methods=["POST"])
|
|
def reload_config(config_id):
|
|
"""Notify backend to reload configuration from JSON files."""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
if config_id == "plc":
|
|
streamer.config_manager.load_configuration()
|
|
elif config_id in ["dataset-definitions", "dataset-variables"]:
|
|
# Reload dataset configuration
|
|
streamer.load_datasets()
|
|
elif config_id in ["plot-definitions", "plot-variables"]:
|
|
# Reload plot configuration if needed
|
|
pass
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Configuration '{config_id}' reloaded successfully",
|
|
}
|
|
)
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/config/dataset-variables/expanded", methods=["GET"])
|
|
def get_expanded_dataset_variables():
|
|
"""Get dataset variables with symbolic variables expanded."""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
# Get the expanded variables from the streamer's config manager
|
|
expanded_data = {"variables": []}
|
|
|
|
if hasattr(streamer.config_manager, 'datasets') and streamer.config_manager.datasets:
|
|
for dataset_id, dataset_config in streamer.config_manager.datasets.items():
|
|
if 'variables' in dataset_config and dataset_config['variables']:
|
|
expanded_data["variables"].append({
|
|
"dataset_id": dataset_id,
|
|
"variables": dataset_config['variables']
|
|
})
|
|
|
|
return jsonify({"success": True, "data": expanded_data})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
# ==============================
# Operational API (PLC Control, Streaming, etc.)
# ==============================


@app.route("/api/plc/config", methods=["POST"])
def update_plc_config():
    """Update PLC configuration"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        ip = data.get("ip", "10.1.33.11")
        rack = int(data.get("rack", 0))
        slot = int(data.get("slot", 2))

        streamer.update_plc_config(ip, rack, slot)
        return jsonify({"success": True, "message": "PLC configuration updated"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/udp/config", methods=["POST"])
def update_udp_config():
    """Update UDP configuration"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        host = data.get("host", "127.0.0.1")
        port = int(data.get("port", 9870))
        sampling_interval = data.get("sampling_interval")  # Optional, can be None

        streamer.update_udp_config(host, port, sampling_interval)
        return jsonify({"success": True, "message": "UDP configuration updated"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/csv/config", methods=["GET"])
|
|
def get_csv_config():
|
|
"""Get CSV recording configuration"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
csv_config = streamer.config_manager.csv_config.copy()
|
|
|
|
# Add current directory information
|
|
current_dir = streamer.config_manager.get_csv_directory_path()
|
|
csv_config["current_directory"] = os.path.abspath(current_dir)
|
|
csv_config["directory_exists"] = os.path.exists(current_dir)
|
|
|
|
# Add disk space info
|
|
disk_info = streamer.get_disk_space_info()
|
|
if disk_info:
|
|
csv_config["disk_space"] = disk_info
|
|
|
|
return jsonify({"success": True, "config": csv_config})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/csv/config", methods=["POST"])
|
|
def update_csv_config():
|
|
"""Update CSV recording configuration"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
|
|
# Extract valid configuration parameters
|
|
config_updates = {}
|
|
valid_params = {
|
|
"records_directory",
|
|
"rotation_enabled",
|
|
"max_size_mb",
|
|
"max_days",
|
|
"max_hours",
|
|
"cleanup_interval_hours",
|
|
}
|
|
|
|
for param in valid_params:
|
|
if param in data:
|
|
config_updates[param] = data[param]
|
|
|
|
if not config_updates:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "No valid configuration parameters provided",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Update configuration
|
|
result = streamer.config_manager.update_csv_config(**config_updates)
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": "CSV configuration updated successfully",
|
|
"old_config": result["old_config"],
|
|
"new_config": result["new_config"],
|
|
}
|
|
)
|
|
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 500
|
|
|
|
|
|
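# Illustrative update request for the endpoint above. The host and port are
# assumptions; parameter names are taken from valid_params, and the values
# are examples only:
#
#   curl -X POST http://localhost:5000/api/csv/config \
#        -H "Content-Type: application/json" \
#        -d '{"rotation_enabled": true, "max_size_mb": 500, "max_days": 7}'

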
@app.route("/api/csv/cleanup", methods=["POST"])
|
|
def trigger_csv_cleanup():
|
|
"""Manually trigger CSV cleanup"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
# Perform cleanup
|
|
streamer.streamer.perform_csv_cleanup()
|
|
return jsonify(
|
|
{"success": True, "message": "CSV cleanup completed successfully"}
|
|
)
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/csv/directory/info", methods=["GET"])
|
|
def get_csv_directory_info():
|
|
"""Get information about CSV directory and files"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
base_dir = streamer.config_manager.get_csv_directory_path()
|
|
|
|
info = {
|
|
"base_directory": os.path.abspath(base_dir),
|
|
"directory_exists": os.path.exists(base_dir),
|
|
"total_files": 0,
|
|
"total_size_mb": 0,
|
|
"oldest_file": None,
|
|
"newest_file": None,
|
|
"day_folders": [],
|
|
}
|
|
|
|
if os.path.exists(base_dir):
|
|
total_size = 0
|
|
oldest_time = None
|
|
newest_time = None
|
|
|
|
for day_folder in os.listdir(base_dir):
|
|
day_path = os.path.join(base_dir, day_folder)
|
|
if os.path.isdir(day_path):
|
|
day_info = {"name": day_folder, "files": 0, "size_mb": 0}
|
|
|
|
for file_name in os.listdir(day_path):
|
|
if file_name.endswith(".csv"):
|
|
file_path = os.path.join(day_path, file_name)
|
|
if os.path.isfile(file_path):
|
|
stat = os.stat(file_path)
|
|
file_size = stat.st_size
|
|
file_time = stat.st_mtime
|
|
|
|
info["total_files"] += 1
|
|
day_info["files"] += 1
|
|
total_size += file_size
|
|
day_info["size_mb"] += file_size / (1024 * 1024)
|
|
|
|
if oldest_time is None or file_time < oldest_time:
|
|
oldest_time = file_time
|
|
info["oldest_file"] = datetime.fromtimestamp(
|
|
file_time
|
|
).isoformat()
|
|
|
|
if newest_time is None or file_time > newest_time:
|
|
newest_time = file_time
|
|
info["newest_file"] = datetime.fromtimestamp(
|
|
file_time
|
|
).isoformat()
|
|
|
|
day_info["size_mb"] = round(day_info["size_mb"], 2)
|
|
info["day_folders"].append(day_info)
|
|
|
|
info["total_size_mb"] = round(total_size / (1024 * 1024), 2)
|
|
info["day_folders"].sort(key=lambda x: x["name"], reverse=True)
|
|
|
|
return jsonify({"success": True, "info": info})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plc/connect", methods=["POST"])
|
|
def connect_plc():
|
|
"""Connect to PLC"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
if streamer.connect_plc():
|
|
return jsonify({"success": True, "message": "Connected to PLC"})
|
|
else:
|
|
# Extraer detalle de error si disponible
|
|
last_error = None
|
|
try:
|
|
last_error = streamer.plc_client.last_error
|
|
except Exception:
|
|
last_error = None
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Error connecting to PLC",
|
|
"error": last_error,
|
|
"plc_config": streamer.config_manager.plc_config,
|
|
}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/plc/disconnect", methods=["POST"])
|
|
def disconnect_plc():
|
|
"""Disconnect from PLC"""
|
|
streamer.disconnect_plc()
|
|
return jsonify({"success": True, "message": "Disconnected from PLC"})
|
|
|
|
|
|
@app.route("/api/variables", methods=["POST"])
|
|
def add_variable():
|
|
"""Add a new variable"""
|
|
try:
|
|
data = request.get_json()
|
|
name = data.get("name")
|
|
area = data.get("area")
|
|
db = int(data.get("db"))
|
|
offset = int(data.get("offset"))
|
|
var_type = data.get("type")
|
|
bit = data.get("bit") # Added bit parameter
|
|
|
|
valid_types = [
|
|
"real",
|
|
"int",
|
|
"bool",
|
|
"dint",
|
|
"word",
|
|
"byte",
|
|
"uint",
|
|
"udint",
|
|
"sint",
|
|
"usint",
|
|
]
|
|
valid_areas = ["db", "mw", "m", "pew", "pe", "paw", "pa", "e", "a", "mb"]
|
|
|
|
if (
|
|
not name
|
|
or not area
|
|
or var_type not in valid_types
|
|
or area.lower() not in valid_areas
|
|
):
|
|
return jsonify({"success": False, "message": "Invalid data"}), 400
|
|
|
|
if area.lower() in ["e", "a", "mb"] and bit is None:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Bit position must be specified for bit areas",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
if area.lower() in ["e", "a", "mb"] and (bit < 0 or bit > 7):
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Bit position must be between 0 and 7",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
streamer.add_variable(name, area, db, offset, var_type, bit)
|
|
return jsonify({"success": True, "message": f"Variable {name} added"})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
|
|
|
|
@app.route("/api/variables/<name>", methods=["DELETE"])
|
|
def remove_variable(name):
|
|
"""Remove a variable"""
|
|
streamer.remove_variable(name)
|
|
return jsonify({"success": True, "message": f"Variable {name} removed"})
|
|
|
|
|
|
@app.route("/api/variables/<name>", methods=["GET"])
|
|
def get_variable(name):
|
|
"""Get a specific variable configuration from current dataset"""
|
|
try:
|
|
if not streamer.current_dataset_id:
|
|
return (
|
|
jsonify({"success": False, "message": "No dataset selected"}),
|
|
400,
|
|
)
|
|
|
|
current_variables = streamer.get_dataset_variables(streamer.current_dataset_id)
|
|
if name not in current_variables:
|
|
return (
|
|
jsonify({"success": False, "message": f"Variable {name} not found"}),
|
|
404,
|
|
)
|
|
|
|
var_config = current_variables[name].copy()
|
|
var_config["name"] = name
|
|
|
|
# Check if variable is in streaming list for current dataset
|
|
streaming_vars = streamer.datasets[streamer.current_dataset_id].get(
|
|
"streaming_variables", []
|
|
)
|
|
var_config["streaming"] = name in streaming_vars
|
|
|
|
return jsonify({"success": True, "variable": var_config})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
|
|
|
|
@app.route("/api/variables/<name>", methods=["PUT"])
|
|
def update_variable(name):
|
|
"""Update an existing variable in current dataset"""
|
|
try:
|
|
if not streamer.current_dataset_id:
|
|
return jsonify({"success": False, "message": "No dataset selected"}), 400
|
|
|
|
data = request.get_json()
|
|
new_name = data.get("name", name)
|
|
area = data.get("area")
|
|
db = int(data.get("db", 1))
|
|
offset = int(data.get("offset"))
|
|
var_type = data.get("type")
|
|
bit = data.get("bit")
|
|
|
|
valid_types = [
|
|
"real",
|
|
"int",
|
|
"bool",
|
|
"dint",
|
|
"word",
|
|
"byte",
|
|
"uint",
|
|
"udint",
|
|
"sint",
|
|
"usint",
|
|
]
|
|
valid_areas = ["db", "mw", "m", "pew", "pe", "paw", "pa", "e", "a", "mb"]
|
|
|
|
if (
|
|
not new_name
|
|
or not area
|
|
or var_type not in valid_types
|
|
or area.lower() not in valid_areas
|
|
):
|
|
return jsonify({"success": False, "message": "Invalid data"}), 400
|
|
|
|
if area.lower() in ["e", "a", "mb"] and bit is None:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Bit position must be specified for bit areas",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
if area.lower() in ["e", "a", "mb"] and (bit < 0 or bit > 7):
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Bit position must be between 0 and 7",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
current_dataset_id = streamer.current_dataset_id
|
|
current_variables = streamer.get_dataset_variables(current_dataset_id)
|
|
|
|
# Check if variable exists in current dataset
|
|
if name not in current_variables:
|
|
return (
|
|
jsonify({"success": False, "message": f"Variable {name} not found"}),
|
|
404,
|
|
)
|
|
|
|
# Check if new name already exists (if name is changing)
|
|
if name != new_name and new_name in current_variables:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": f"Variable {new_name} already exists",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Preserve streaming state
|
|
streaming_vars = streamer.datasets[current_dataset_id].get(
|
|
"streaming_variables", []
|
|
)
|
|
was_streaming = name in streaming_vars
|
|
|
|
# Remove old variable
|
|
streamer.remove_variable_from_dataset(current_dataset_id, name)
|
|
|
|
# Add updated variable
|
|
streamer.add_variable_to_dataset(
|
|
current_dataset_id, new_name, area, db, offset, var_type, bit, was_streaming
|
|
)
|
|
|
|
return jsonify({"success": True, "message": "Variable updated successfully"})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
|
|
|
|
@app.route("/api/variables/<name>/streaming", methods=["POST"])
|
|
def toggle_variable_streaming(name):
|
|
"""Toggle streaming for a specific variable in current dataset"""
|
|
try:
|
|
if not streamer.current_dataset_id:
|
|
return jsonify({"success": False, "message": "No dataset selected"}), 400
|
|
|
|
data = request.get_json()
|
|
enabled = data.get("enabled", False)
|
|
|
|
# Use the new dataset-specific method
|
|
streamer.toggle_variable_streaming(streamer.current_dataset_id, name, enabled)
|
|
|
|
status = "enabled" if enabled else "disabled"
|
|
return jsonify(
|
|
{"success": True, "message": f"Variable {name} streaming {status}"}
|
|
)
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
|
|
|
|
@app.route("/api/variables/streaming", methods=["GET"])
|
|
def get_streaming_variables():
|
|
"""Get list of variables enabled for streaming in current dataset"""
|
|
if streamer is None:
|
|
return jsonify({"success": False, "error": "Streamer not initialized"}), 503
|
|
|
|
# Get streaming variables from current dataset
|
|
streaming_vars = []
|
|
if streamer.current_dataset_id and streamer.current_dataset_id in streamer.datasets:
|
|
streaming_vars = streamer.datasets[streamer.current_dataset_id].get(
|
|
"streaming_variables", []
|
|
)
|
|
|
|
return jsonify({"success": True, "streaming_variables": streaming_vars})
|
|
|
|
|
|
@app.route("/api/datasets/<dataset_id>/variables/values", methods=["GET"])
|
|
def get_dataset_variable_values(dataset_id):
|
|
"""Get current values of all variables in a dataset - CACHE ONLY"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
# Check if dataset exists
|
|
if dataset_id not in streamer.datasets:
|
|
return (
|
|
jsonify(
|
|
{"success": False, "message": f"Dataset '{dataset_id}' not found"}
|
|
),
|
|
404,
|
|
)
|
|
|
|
# Get dataset variables
|
|
dataset_variables = streamer.get_dataset_variables(dataset_id)
|
|
|
|
if not dataset_variables:
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": "No variables defined in this dataset",
|
|
"values": {},
|
|
"source": "no_variables",
|
|
}
|
|
)
|
|
|
|
# Check if dataset is active (required for cache to be populated)
|
|
if dataset_id not in streamer.active_datasets:
|
|
return jsonify(
|
|
{
|
|
"success": False,
|
|
"message": f"Dataset '{dataset_id}' is not active. Activate the dataset to start reading variables and populate cache.",
|
|
"error_type": "dataset_inactive",
|
|
"values": {},
|
|
"detailed_errors": {},
|
|
"stats": {
|
|
"success": 0,
|
|
"failed": 0,
|
|
"total": len(dataset_variables),
|
|
},
|
|
"timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
|
"source": "no_cache_dataset_inactive",
|
|
"is_cached": False,
|
|
}
|
|
)
|
|
|
|
# Check if PLC is connected (required for streaming to populate cache)
|
|
if not streamer.plc_client.is_connected():
|
|
return jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "PLC not connected. Connect to PLC and activate dataset to populate cache.",
|
|
"error_type": "plc_disconnected",
|
|
"values": {},
|
|
"detailed_errors": {},
|
|
"stats": {
|
|
"success": 0,
|
|
"failed": 0,
|
|
"total": len(dataset_variables),
|
|
},
|
|
"timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
|
"source": "no_cache_plc_disconnected",
|
|
"is_cached": False,
|
|
}
|
|
)
|
|
|
|
# Get cached values - this is the ONLY source of data according to application principles
|
|
if not streamer.has_cached_values(dataset_id):
|
|
return jsonify(
|
|
{
|
|
"success": False,
|
|
"message": f"No cached values available for dataset '{dataset_id}'. Cache is populated by the streaming process at the dataset's configured interval. Please wait for the next reading cycle.",
|
|
"error_type": "no_cache_available",
|
|
"values": {},
|
|
"detailed_errors": {},
|
|
"stats": {
|
|
"success": 0,
|
|
"failed": 0,
|
|
"total": len(dataset_variables),
|
|
},
|
|
"timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
|
"source": "no_cache_available",
|
|
"is_cached": False,
|
|
"note": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
|
|
}
|
|
)
|
|
|
|
# Get cached values (the ONLY valid source according to application design)
|
|
read_result = streamer.get_cached_dataset_values(dataset_id)
|
|
|
|
# Convert timestamp from ISO format to readable format for consistency
|
|
if read_result.get("timestamp"):
|
|
try:
|
|
cached_timestamp = datetime.fromisoformat(read_result["timestamp"])
|
|
read_result["timestamp"] = cached_timestamp.strftime(
|
|
"%Y-%m-%d %H:%M:%S"
|
|
)
|
|
except:
|
|
pass # Keep original timestamp if conversion fails
|
|
|
|
# Extract values and handle diagnostics
|
|
if not read_result.get("success", False):
|
|
# Complete failure case
|
|
error_msg = read_result.get("error", "Unknown error reading variables")
|
|
error_type = read_result.get("error_type", "unknown")
|
|
|
|
# Log detailed error information
|
|
if streamer.logger:
|
|
streamer.logger.error(
|
|
f"Cached values indicate failure for dataset '{dataset_id}': {error_msg}"
|
|
)
|
|
if read_result.get("errors"):
|
|
for var_name, var_error in read_result["errors"].items():
|
|
streamer.logger.error(f" Variable '{var_name}': {var_error}")
|
|
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": error_msg,
|
|
"error_type": error_type,
|
|
"values": {},
|
|
"detailed_errors": read_result.get("errors", {}),
|
|
"stats": read_result.get("stats", {}),
|
|
"timestamp": read_result.get(
|
|
"timestamp", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
|
),
|
|
"source": "cache",
|
|
"is_cached": True,
|
|
}
|
|
),
|
|
500,
|
|
)
|
|
|
|
# Success or partial success case
|
|
raw_values = read_result.get("values", {})
|
|
variable_errors = read_result.get("errors", {})
|
|
stats = read_result.get("stats", {})
|
|
|
|
# Format values for display
|
|
formatted_values = {}
|
|
error_details = {}
|
|
|
|
for var_name, value in raw_values.items():
|
|
if value is not None:
|
|
var_config = dataset_variables[var_name]
|
|
var_type = var_config.get("type", "unknown")
|
|
|
|
# Format value based on type
|
|
try:
|
|
if var_type == "real":
|
|
formatted_values[var_name] = (
|
|
f"{value:.3f}"
|
|
if isinstance(value, (int, float))
|
|
else str(value)
|
|
)
|
|
elif var_type == "bool":
|
|
formatted_values[var_name] = "TRUE" if value else "FALSE"
|
|
elif var_type in [
|
|
"int",
|
|
"uint",
|
|
"dint",
|
|
"udint",
|
|
"word",
|
|
"byte",
|
|
"sint",
|
|
"usint",
|
|
]:
|
|
formatted_values[var_name] = (
|
|
str(int(value))
|
|
if isinstance(value, (int, float))
|
|
else str(value)
|
|
)
|
|
else:
|
|
formatted_values[var_name] = str(value)
|
|
except Exception as format_error:
|
|
formatted_values[var_name] = "FORMAT_ERROR"
|
|
error_details[var_name] = f"Format error: {str(format_error)}"
|
|
else:
|
|
# Variable had an error - get the specific error message
|
|
specific_error = variable_errors.get(var_name, "Unknown error")
|
|
formatted_values[var_name] = "ERROR"
|
|
error_details[var_name] = specific_error
|
|
|
|
# Prepare response message
|
|
total_vars = stats.get("total", len(dataset_variables))
|
|
success_vars = stats.get("success", 0)
|
|
failed_vars = stats.get("failed", 0)
|
|
|
|
# Determine data source for message (always cache now)
|
|
data_source = "cache"
|
|
source_text = " (from streaming cache)"
|
|
|
|
if failed_vars == 0:
|
|
message = (
|
|
f"Successfully displaying all {success_vars} variables{source_text}"
|
|
)
|
|
response_success = True
|
|
else:
|
|
message = f"Partial data available: {success_vars}/{total_vars} variables have valid cached values, {failed_vars} failed{source_text}"
|
|
response_success = True # Still success if we got some values
|
|
|
|
# Log warnings for partial failures
|
|
if streamer.logger:
|
|
streamer.logger.warning(
|
|
f"Partial failure in cached values for dataset '{dataset_id}': {message}"
|
|
)
|
|
for var_name, var_error in error_details.items():
|
|
if formatted_values.get(var_name) == "ERROR":
|
|
streamer.logger.warning(f" Variable '{var_name}': {var_error}")
|
|
|
|
return jsonify(
|
|
{
|
|
"success": response_success,
|
|
"message": message,
|
|
"values": formatted_values,
|
|
"detailed_errors": error_details,
|
|
"stats": stats,
|
|
"timestamp": read_result.get(
|
|
"timestamp", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
|
),
|
|
"warning": read_result.get("warning"),
|
|
"source": data_source,
|
|
"is_cached": True,
|
|
"cache_info": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
|
|
}
|
|
)
|
|
|
|
except Exception as e:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": f"Error retrieving cached variable values: {str(e)}",
|
|
"values": {},
|
|
}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
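# Sketch of a successful response from the endpoint above, reconstructed
# from the jsonify() call. Variable names and values are illustrative, not
# captured output:
#
#   {
#     "success": true,
#     "message": "Successfully displaying all 2 variables (from streaming cache)",
#     "values": {"motor_speed": "1480", "running": "TRUE"},
#     "detailed_errors": {},
#     "stats": {"success": 2, "failed": 0, "total": 2},
#     "timestamp": "2024-01-01 12:00:00",
#     "source": "cache",
#     "is_cached": true,
#     "cache_info": "Dataset reads every 0.5s"
#   }

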
# Dataset Management API Endpoints


@app.route("/api/datasets", methods=["GET"])
def get_datasets():
    """Get all datasets"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    return jsonify(
        {
            "success": True,
            "datasets": streamer.datasets,
            "active_datasets": list(streamer.active_datasets),
            "current_dataset_id": streamer.current_dataset_id,
        }
    )


@app.route("/api/datasets", methods=["POST"])
|
|
def create_dataset():
|
|
"""Create a new dataset"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
dataset_id = data.get("dataset_id", "").strip()
|
|
name = data.get("name", "").strip()
|
|
prefix = data.get("prefix", "").strip()
|
|
sampling_interval = data.get("sampling_interval")
|
|
|
|
if not dataset_id or not name or not prefix:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Dataset ID, name, and prefix are required",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Validate sampling interval if provided
|
|
if sampling_interval is not None:
|
|
sampling_interval = float(sampling_interval)
|
|
if sampling_interval < 0.01:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Sampling interval must be at least 0.01 seconds",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
streamer.create_dataset(dataset_id, name, prefix, sampling_interval)
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Dataset '{name}' created successfully",
|
|
"dataset_id": dataset_id,
|
|
}
|
|
)
|
|
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
except Exception as e:
|
|
return (
|
|
jsonify({"success": False, "message": f"Error creating dataset: {str(e)}"}),
|
|
500,
|
|
)
|
|
|
|
|
|
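# Illustrative creation request (host/port and the example values are
# assumptions; field names match the handler above):
#
#   curl -X POST http://localhost:5000/api/datasets \
#        -H "Content-Type: application/json" \
#        -d '{"dataset_id": "demo", "name": "Demo", "prefix": "demo",
#             "sampling_interval": 0.5}'

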
@app.route("/api/datasets/<dataset_id>", methods=["DELETE"])
|
|
def delete_dataset(dataset_id):
|
|
"""Delete a dataset"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.delete_dataset(dataset_id)
|
|
return jsonify({"success": True, "message": "Dataset deleted successfully"})
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 404
|
|
except Exception as e:
|
|
return (
|
|
jsonify({"success": False, "message": f"Error deleting dataset: {str(e)}"}),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/datasets/<dataset_id>/activate", methods=["POST"])
|
|
def activate_dataset(dataset_id):
|
|
"""Activate a dataset for streaming"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.activate_dataset(dataset_id)
|
|
return jsonify({"success": True, "message": "Dataset activated successfully"})
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 404
|
|
except RuntimeError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
except Exception as e:
|
|
return (
|
|
jsonify(
|
|
{"success": False, "message": f"Error activating dataset: {str(e)}"}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/datasets/<dataset_id>/deactivate", methods=["POST"])
|
|
def deactivate_dataset(dataset_id):
|
|
"""Deactivate a dataset"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.deactivate_dataset(dataset_id)
|
|
return jsonify({"success": True, "message": "Dataset deactivated successfully"})
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 404
|
|
except Exception as e:
|
|
return (
|
|
jsonify(
|
|
{"success": False, "message": f"Error deactivating dataset: {str(e)}"}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/datasets/<dataset_id>/variables", methods=["POST"])
|
|
def add_variable_to_dataset(dataset_id):
|
|
"""Add a variable to a dataset"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
name = data.get("name", "").strip()
|
|
area = data.get("area", "").strip()
|
|
db = data.get("db", 1)
|
|
offset = data.get("offset", 0)
|
|
var_type = data.get("type", "").strip()
|
|
bit = data.get("bit")
|
|
streaming = data.get("streaming", False)
|
|
|
|
if not name or not area or not var_type:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": "Variable name, area, and type are required",
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
streamer.add_variable_to_dataset(
|
|
dataset_id, name, area, db, offset, var_type, bit, streaming
|
|
)
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Variable '{name}' added to dataset successfully",
|
|
}
|
|
)
|
|
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
except Exception as e:
|
|
return (
|
|
jsonify({"success": False, "message": f"Error adding variable: {str(e)}"}),
|
|
500,
|
|
)
|
|
|
|
|
|
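# Illustrative payload for adding a DB variable to a dataset (host/port,
# the "demo" dataset, and the example address are assumptions; field names
# match the handler above):
#
#   curl -X POST http://localhost:5000/api/datasets/demo/variables \
#        -H "Content-Type: application/json" \
#        -d '{"name": "motor_speed", "area": "db", "db": 1, "offset": 0,
#             "type": "real", "streaming": true}'

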
@app.route("/api/datasets/<dataset_id>/variables/<variable_name>", methods=["DELETE"])
|
|
def remove_variable_from_dataset(dataset_id, variable_name):
|
|
"""Remove a variable from a dataset"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.remove_variable_from_dataset(dataset_id, variable_name)
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Variable '{variable_name}' removed from dataset successfully",
|
|
}
|
|
)
|
|
except ValueError as e:
|
|
return jsonify({"success": False, "message": str(e)}), 404
|
|
except Exception as e:
|
|
return (
|
|
jsonify(
|
|
{"success": False, "message": f"Error removing variable: {str(e)}"}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route(
    "/api/datasets/<dataset_id>/variables/<variable_name>/streaming", methods=["POST"]
)
def toggle_variable_streaming_in_dataset(dataset_id, variable_name):
    """Toggle streaming for a variable in a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        enabled = data.get("enabled", False)

        streamer.toggle_variable_streaming(dataset_id, variable_name, enabled)

        return jsonify(
            {
                "success": True,
                "message": f"Variable '{variable_name}' streaming {'enabled' if enabled else 'disabled'}",
            }
        )
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except Exception as e:
        return (
            jsonify(
                {"success": False, "message": f"Error toggling streaming: {str(e)}"}
            ),
            500,
        )


@app.route("/api/datasets/current", methods=["POST"])
|
|
def set_current_dataset():
|
|
"""Set the current dataset for editing"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
dataset_id = data.get("dataset_id")
|
|
|
|
if dataset_id and dataset_id in streamer.datasets:
|
|
streamer.current_dataset_id = dataset_id
|
|
# Note: No need to save - this is just changing current selection in memory
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": "Current dataset updated successfully",
|
|
"current_dataset_id": dataset_id,
|
|
}
|
|
)
|
|
else:
|
|
return jsonify({"success": False, "message": "Invalid dataset ID"}), 400
|
|
|
|
except Exception as e:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"success": False,
|
|
"message": f"Error setting current dataset: {str(e)}",
|
|
}
|
|
),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/streaming/start", methods=["POST"])
|
|
def start_streaming():
|
|
"""Start UDP streaming (legacy endpoint - now only starts UDP streaming)"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
if streamer.start_streaming():
|
|
return jsonify({"success": True, "message": "UDP streaming started"})
|
|
else:
|
|
return (
|
|
jsonify({"success": False, "message": "Error starting UDP streaming"}),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/streaming/stop", methods=["POST"])
|
|
def stop_streaming():
|
|
"""Stop UDP streaming (legacy endpoint - now only stops UDP streaming)"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
streamer.stop_streaming()
|
|
return jsonify({"success": True, "message": "UDP streaming stopped"})
|
|
|
|
|
|
@app.route("/api/sampling", methods=["POST"])
|
|
def update_sampling():
|
|
"""Update sampling interval"""
|
|
try:
|
|
data = request.get_json()
|
|
interval = float(data.get("interval", 0.1))
|
|
|
|
if interval < 0.01:
|
|
interval = 0.01
|
|
|
|
streamer.update_sampling_interval(interval)
|
|
return jsonify({"success": True, "message": f"Interval updated to {interval}s"})
|
|
|
|
except Exception as e:
|
|
return jsonify({"success": False, "message": str(e)}), 400
|
|
|
|
|
|
@app.route("/api/csv/start", methods=["POST"])
|
|
def start_csv_recording_legacy():
|
|
"""Start CSV recording independently (legacy endpoint)"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
if streamer.data_streamer.start_csv_recording():
|
|
return jsonify({"success": True, "message": "CSV recording started"})
|
|
else:
|
|
return (
|
|
jsonify({"success": False, "message": "Error starting CSV recording"}),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/csv/stop", methods=["POST"])
|
|
def stop_csv_recording_legacy():
|
|
"""Stop CSV recording independently (legacy endpoint)"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
streamer.data_streamer.stop_csv_recording()
|
|
return jsonify({"success": True, "message": "CSV recording stopped"})
|
|
|
|
|
|
@app.route("/api/csv/recording/start", methods=["POST"])
|
|
def start_csv_recording():
|
|
"""Start CSV recording independently of UDP streaming"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
if streamer.data_streamer.start_csv_recording():
|
|
return jsonify({"success": True, "message": "CSV recording started"})
|
|
else:
|
|
return (
|
|
jsonify({"success": False, "message": "Error starting CSV recording"}),
|
|
500,
|
|
)
|
|
|
|
|
|
@app.route("/api/csv/recording/stop", methods=["POST"])
|
|
def stop_csv_recording():
|
|
"""Stop CSV recording independently of UDP streaming"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
streamer.data_streamer.stop_csv_recording()
|
|
return jsonify({"success": True, "message": "CSV recording stopped"})
|
|
|
|
|
|
# 🔑 NEW: UDP Streaming Control (Independent)
@app.route("/api/udp/streaming/start", methods=["POST"])
def start_udp_streaming():
    """Start UDP streaming to PlotJuggler independently of CSV recording"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.data_streamer.start_udp_streaming():
        return jsonify(
            {"success": True, "message": "UDP streaming to PlotJuggler started"}
        )
    else:
        return (
            jsonify({"success": False, "message": "Error starting UDP streaming"}),
            500,
        )


@app.route("/api/udp/streaming/stop", methods=["POST"])
def stop_udp_streaming():
    """Stop UDP streaming to PlotJuggler independently of CSV recording"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.data_streamer.stop_udp_streaming()
    return jsonify({"success": True, "message": "UDP streaming to PlotJuggler stopped"})


# 🔍 HEALTH CHECK ENDPOINT
@app.route("/api/health", methods=["GET"])
def health_check():
    """Simple health check endpoint"""
    try:
        streamer_status = "initialized" if streamer is not None else "not_initialized"
        plot_manager_status = (
            "available"
            if (
                streamer
                and hasattr(streamer, "data_streamer")
                and hasattr(streamer.data_streamer, "plot_manager")
            )
            else "not_available"
        )

        return jsonify(
            {
                "status": "ok",
                "streamer": streamer_status,
                "plot_manager": plot_manager_status,
                "timestamp": time.time(),
            }
        )
    except Exception as e:
        return (
            jsonify({"status": "error", "error": str(e), "timestamp": time.time()}),
            500,
        )


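# Quick liveness probe for the endpoint above (host/port assumed):
#
#   curl http://localhost:5000/api/health
#   # -> {"status": "ok", "streamer": "initialized", ...}

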
# 📈 PLOT MANAGER API ENDPOINTS
@app.route("/api/plots", methods=["GET"])
def get_plots():
    """Get all plot sessions status"""
    print("🔍 DEBUG: /api/plots endpoint called")

    error_response = check_streamer_initialized()
    if error_response:
        print("❌ DEBUG: Streamer not initialized")
        return error_response

    print("✅ DEBUG: Streamer is initialized")

    try:
        print("🔍 DEBUG: Accessing streamer.data_streamer.plot_manager...")
        plot_manager = streamer.data_streamer.plot_manager
        print(f"✅ DEBUG: Plot manager obtained: {type(plot_manager)}")

        print("🔍 DEBUG: Calling get_all_sessions_status()...")
        sessions = plot_manager.get_all_sessions_status()
        print(f"✅ DEBUG: Sessions obtained: {len(sessions)} sessions")
        print(f"📊 DEBUG: Sessions data: {sessions}")

        result = {"sessions": sessions}
        print(f"✅ DEBUG: Returning result: {result}")
        return jsonify(result)
    except Exception as e:
        print(f"💥 DEBUG: Exception in get_plots: {type(e).__name__}: {str(e)}")
        import traceback

        print("📋 DEBUG: Full traceback:")
        traceback.print_exc()
        return jsonify({"error": str(e)}), 500


@app.route("/api/plots", methods=["POST"])
|
|
def create_plot():
|
|
"""Create a new plot session"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
|
|
# Validar datos requeridos
|
|
if not data.get("variables"):
|
|
return jsonify({"error": "At least one variable is required"}), 400
|
|
|
|
if not data.get("time_window"):
|
|
return jsonify({"error": "Time window is required"}), 400
|
|
|
|
# Validar que las variables existen en datasets activos
|
|
available_vars = streamer.data_streamer.plot_manager.get_available_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
invalid_vars = [var for var in data["variables"] if var not in available_vars]
|
|
if invalid_vars:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"error": f"Variables not available: {', '.join(invalid_vars)}",
|
|
"available_variables": available_vars,
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Validar trigger si está habilitado
|
|
if data.get("trigger_enabled") and data.get("trigger_variable"):
|
|
boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
if data["trigger_variable"] not in boolean_vars:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"error": f"Trigger variable '{data['trigger_variable']}' is not a boolean variable",
|
|
"boolean_variables": boolean_vars,
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Crear configuración de la sesión
|
|
config = {
|
|
"id": data.get("id"), # Use the plot ID from the definition
|
|
"name": data.get(
|
|
"name", f"Plot {len(streamer.data_streamer.plot_manager.sessions) + 1}"
|
|
),
|
|
"variables": data["variables"],
|
|
"time_window": int(data["time_window"]),
|
|
"y_min": data.get("y_min"),
|
|
"y_max": data.get("y_max"),
|
|
"trigger_variable": data.get("trigger_variable"),
|
|
"trigger_enabled": data.get("trigger_enabled", False),
|
|
"trigger_on_true": data.get("trigger_on_true", True),
|
|
}
|
|
|
|
# Convertir valores numéricos si están presentes
|
|
if config["y_min"] is not None:
|
|
config["y_min"] = float(config["y_min"])
|
|
if config["y_max"] is not None:
|
|
config["y_max"] = float(config["y_max"])
|
|
|
|
session_id = streamer.data_streamer.plot_manager.create_session(config)
|
|
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"session_id": session_id,
|
|
"message": f"Plot session '{config['name']}' created successfully",
|
|
}
|
|
)
|
|
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
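# Illustrative plot-session payload (host/port and the variable name are
# assumptions; keys mirror the config dict built above):
#
#   curl -X POST http://localhost:5000/api/plots \
#        -H "Content-Type: application/json" \
#        -d '{"name": "Motor", "variables": ["motor_speed"], "time_window": 60,
#             "y_min": 0, "y_max": 3000, "trigger_enabled": false}'

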
@app.route("/api/plots/<session_id>", methods=["DELETE"])
|
|
def remove_plot(session_id):
|
|
"""Remove a plot session"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
success = streamer.data_streamer.plot_manager.remove_session(session_id)
|
|
if success:
|
|
return jsonify({"success": True, "message": "Plot session removed"})
|
|
else:
|
|
return jsonify({"error": "Plot session not found"}), 404
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plots/<session_id>/control", methods=["POST"])
|
|
def control_plot(session_id):
|
|
"""Control a plot session (start/stop/pause/resume/clear)"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
action = data.get("action")
|
|
|
|
if action not in ["start", "stop", "pause", "resume", "clear"]:
|
|
return (
|
|
jsonify(
|
|
{"error": "Invalid action. Use: start, stop, pause, resume, clear"}
|
|
),
|
|
400,
|
|
)
|
|
|
|
success = streamer.data_streamer.plot_manager.control_session(
|
|
session_id, action
|
|
)
|
|
if success:
|
|
return jsonify({"success": True, "message": f"Plot session {action}ed"})
|
|
else:
|
|
return jsonify({"error": "Plot session not found"}), 404
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plots/<session_id>/data", methods=["GET"])
|
|
def get_plot_data(session_id):
|
|
"""Get plot data for a specific session"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
plot_data = streamer.data_streamer.plot_manager.get_session_data(session_id)
|
|
if plot_data:
|
|
return jsonify(plot_data)
|
|
else:
|
|
return jsonify({"error": "Plot session not found"}), 404
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plots/<session_id>/config", methods=["GET"])
|
|
def get_plot_config(session_id):
|
|
"""Get plot configuration for a specific session"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
config = streamer.data_streamer.plot_manager.get_session_config(session_id)
|
|
if config:
|
|
return jsonify({"success": True, "config": config})
|
|
else:
|
|
return jsonify({"error": "Plot session not found"}), 404
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plots/<session_id>/config", methods=["PUT"])
|
|
def update_plot_config(session_id):
|
|
"""Update plot configuration for a specific session"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
data = request.get_json()
|
|
|
|
# Validar datos requeridos
|
|
if not data.get("variables"):
|
|
return jsonify({"error": "At least one variable is required"}), 400
|
|
|
|
if not data.get("time_window"):
|
|
return jsonify({"error": "Time window is required"}), 400
|
|
|
|
# Validar que las variables existen en datasets activos
|
|
available_vars = streamer.data_streamer.plot_manager.get_available_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
invalid_vars = [var for var in data["variables"] if var not in available_vars]
|
|
if invalid_vars:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"error": f"Variables not available: {', '.join(invalid_vars)}",
|
|
"available_variables": available_vars,
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Validar trigger si está habilitado
|
|
if data.get("trigger_enabled") and data.get("trigger_variable"):
|
|
boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
if data["trigger_variable"] not in boolean_vars:
|
|
return (
|
|
jsonify(
|
|
{
|
|
"error": f"Trigger variable '{data['trigger_variable']}' is not a boolean variable",
|
|
"boolean_variables": boolean_vars,
|
|
}
|
|
),
|
|
400,
|
|
)
|
|
|
|
# Crear configuración actualizada
|
|
config = {
|
|
"name": data.get("name", f"Plot {session_id}"),
|
|
"variables": data["variables"],
|
|
"time_window": int(data["time_window"]),
|
|
"y_min": data.get("y_min"),
|
|
"y_max": data.get("y_max"),
|
|
"trigger_variable": data.get("trigger_variable"),
|
|
"trigger_enabled": data.get("trigger_enabled", False),
|
|
"trigger_on_true": data.get("trigger_on_true", True),
|
|
}
|
|
|
|
# Convertir valores numéricos si están presentes
|
|
if config["y_min"] is not None:
|
|
config["y_min"] = float(config["y_min"])
|
|
if config["y_max"] is not None:
|
|
config["y_max"] = float(config["y_max"])
|
|
|
|
# Actualizar configuración
|
|
success = streamer.data_streamer.plot_manager.update_session_config(
|
|
session_id, config
|
|
)
|
|
|
|
if success:
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"message": f"Plot session '{config['name']}' updated successfully",
|
|
}
|
|
)
|
|
else:
|
|
return jsonify({"error": "Plot session not found"}), 404
|
|
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plots/variables", methods=["GET"])
|
|
def get_plot_variables():
|
|
"""Get available variables for plotting"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
available_vars = streamer.data_streamer.plot_manager.get_available_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
|
|
streamer.data_streamer.get_active_datasets(),
|
|
streamer.config_manager.datasets,
|
|
)
|
|
|
|
return jsonify(
|
|
{
|
|
"available_variables": available_vars,
|
|
"boolean_variables": boolean_vars,
|
|
"active_datasets_count": len(
|
|
streamer.data_streamer.get_active_datasets()
|
|
),
|
|
}
|
|
)
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/status")
|
|
def get_status():
|
|
"""Get current status"""
|
|
if streamer is None:
|
|
return jsonify({"error": "Application not initialized"}), 503
|
|
return jsonify(streamer.get_status())
|
|
|
|
|
|
@app.route("/api/plc/reconnection/status")
|
|
def get_plc_reconnection_status():
|
|
"""Get detailed PLC reconnection status"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
status = streamer.plc_client.get_reconnection_status()
|
|
connection_info = streamer.plc_client.get_connection_info()
|
|
|
|
return jsonify(
|
|
{"success": True, "reconnection": status, "connection": connection_info}
|
|
)
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plc/reconnection/enable", methods=["POST"])
|
|
def enable_plc_reconnection():
|
|
"""Enable automatic PLC reconnection"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.plc_client.enable_automatic_reconnection(True)
|
|
return jsonify(
|
|
{"success": True, "message": "Automatic PLC reconnection enabled"}
|
|
)
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/plc/reconnection/disable", methods=["POST"])
|
|
def disable_plc_reconnection():
|
|
"""Disable automatic PLC reconnection"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
streamer.plc_client.enable_automatic_reconnection(False)
|
|
return jsonify(
|
|
{"success": True, "message": "Automatic PLC reconnection disabled"}
|
|
)
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/events")
|
|
def get_events():
|
|
"""Get recent events from the application log"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
try:
|
|
limit = request.args.get("limit", 50, type=int)
|
|
limit = min(limit, 200) # Maximum 200 events per request
|
|
|
|
events = streamer.get_recent_events(limit)
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"events": events,
|
|
"total_events": len(streamer.event_logger.events_log),
|
|
"showing": len(events),
|
|
}
|
|
)
|
|
except Exception as e:
|
|
return jsonify({"success": False, "error": str(e)}), 500
|
|
|
|
|
|
@app.route("/api/stream/variables", methods=["GET"])
|
|
def stream_variables():
|
|
"""Stream variable values in real-time using Server-Sent Events"""
|
|
error_response = check_streamer_initialized()
|
|
if error_response:
|
|
return error_response
|
|
|
|
# Get request parameters outside the generator
|
|
dataset_id = request.args.get("dataset_id")
|
|
interval = float(request.args.get("interval", 1.0)) # Default 1 second
|
|
|
|
if not dataset_id:
|
|
return jsonify({"error": "Dataset ID required"}), 400
|
|
|
|
if dataset_id not in streamer.datasets:
|
|
return jsonify({"error": f"Dataset {dataset_id} not found"}), 404
|
|
|
|
def generate():
|
|
"""Generate SSE data stream - CACHE ONLY"""
|
|
|
|
# Send initial connection message
|
|
yield f"data: {json.dumps({'type': 'connected', 'message': 'SSE connection established - monitoring cache'})}\n\n"
|
|
|
|
last_values = {}
|
|
|
|
while True:
|
|
try:
|
|
# Check basic preconditions for cache availability
|
|
if not streamer.plc_client.is_connected():
|
|
# PLC not connected - cache won't be populated
|
|
data = {
|
|
"type": "plc_disconnected",
|
|
"message": "PLC not connected - cache not being populated",
|
|
"timestamp": datetime.now().isoformat(),
|
|
}
|
|
yield f"data: {json.dumps(data)}\n\n"
|
|
time.sleep(interval)
|
|
continue
|
|
|
|
# Check if dataset is active
|
|
if dataset_id not in streamer.active_datasets:
|
|
data = {
|
|
"type": "dataset_inactive",
|
|
"message": f"Dataset '{dataset_id}' is not active - activate to populate cache",
|
|
"timestamp": datetime.now().isoformat(),
|
|
}
|
|
yield f"data: {json.dumps(data)}\n\n"
|
|
time.sleep(interval)
|
|
continue
|
|
|
|
# Get dataset variables
|
|
dataset_variables = streamer.get_dataset_variables(dataset_id)
|
|
|
|
if not dataset_variables:
|
|
# No variables in dataset
|
|
data = {
|
|
"type": "no_variables",
|
|
"message": "No variables defined in this dataset",
|
|
"timestamp": datetime.now().isoformat(),
|
|
}
|
|
yield f"data: {json.dumps(data)}\n\n"
|
|
time.sleep(interval)
|
|
continue
|
|
|
|
# Get cached values - the ONLY source according to application principles
|
|
if not streamer.has_cached_values(dataset_id):
|
|
# No cache available yet - dataset might be starting up
|
|
sampling_interval = (
|
|
streamer.config_manager.get_dataset_sampling_interval(
|
|
dataset_id
|
|
)
|
|
)
|
|
data = {
|
|
"type": "no_cache",
|
|
"message": f"Waiting for cache to be populated (dataset reads every {sampling_interval}s)",
|
|
"timestamp": datetime.now().isoformat(),
|
|
"sampling_interval": sampling_interval,
|
|
}
|
|
yield f"data: {json.dumps(data)}\n\n"
|
|
time.sleep(interval)
|
|
continue
|
|
|
|
# Get cached values (the ONLY valid source)
|
|
read_result = streamer.get_cached_dataset_values(dataset_id)
|
|
|
|
if read_result.get("success", False):
|
|
values = read_result.get("values", {})
|
|
timestamp = read_result.get("timestamp", datetime.now().isoformat())
|
|
|
|
# Format values for display
|
|
formatted_values = {}
|
|
for var_name, value in values.items():
|
|
if value is not None:
|
|
var_config = dataset_variables[var_name]
|
|
var_type = var_config.get("type", "unknown")
|
|
|
|
try:
|
|
if var_type == "real":
|
|
formatted_values[var_name] = (
|
|
f"{value:.3f}"
|
|
if isinstance(value, (int, float))
|
|
else str(value)
|
|
)
|
|
elif var_type == "bool":
|
|
formatted_values[var_name] = (
|
|
"TRUE" if value else "FALSE"
|
|
)
|
|
elif var_type in [
|
|
"int",
|
|
"uint",
|
|
"dint",
|
|
"udint",
|
|
"word",
|
|
"byte",
|
|
"sint",
|
|
"usint",
|
|
]:
|
|
formatted_values[var_name] = (
|
|
str(int(value))
|
|
if isinstance(value, (int, float))
|
|
else str(value)
|
|
)
|
|
else:
|
|
formatted_values[var_name] = str(value)
|
|
except:
|
|
formatted_values[var_name] = "FORMAT_ERROR"
|
|
else:
|
|
formatted_values[var_name] = "ERROR"
|
|
|
|
# Only send if values changed
|
|
if formatted_values != last_values:
|
|
data = {
|
|
"type": "values",
|
|
"values": formatted_values,
|
|
"timestamp": timestamp,
|
|
"source": "cache",
|
|
"stats": read_result.get("stats", {}),
|
|
"cache_age_info": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
|
|
}
|
|
yield f"data: {json.dumps(data)}\n\n"
|
|
last_values = formatted_values.copy()
|
|
else:
|
|
# Send error data from cache
|
|
error_data = {
|
|
"type": "cache_error",
|
|
"message": read_result.get(
|
|
"error", "Cached data indicates error"
|
|
),
|
|
"timestamp": datetime.now().isoformat(),
|
|
"error_type": read_result.get("error_type", "unknown"),
|
|
"source": "cache",
|
|
}
|
|
yield f"data: {json.dumps(error_data)}\n\n"
|
|
|
|
time.sleep(interval)
|
|
|
|
except Exception as e:
|
|
error_data = {
|
|
"type": "stream_error",
|
|
"message": f"SSE stream error: {str(e)}",
|
|
"timestamp": datetime.now().isoformat(),
|
|
"source": "cache_monitoring",
|
|
}
|
|
yield f"data: {json.dumps(error_data)}\n\n"
|
|
time.sleep(interval)
|
|
|
|
return Response(
|
|
generate(),
|
|
mimetype="text/event-stream",
|
|
headers={
|
|
"Cache-Control": "no-cache",
|
|
"Connection": "keep-alive",
|
|
"Access-Control-Allow-Origin": "*",
|
|
"Access-Control-Allow-Headers": "Cache-Control",
|
|
},
|
|
)
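

# Minimal SSE consumer sketch for the endpoint above, using only the standard
# library. Assumptions: server on localhost:5050 (the default in main()) and a
# hypothetical dataset id "my_dataset". Runs until interrupted; never called
# by the application itself.
def _example_consume_variable_stream(dataset_id: str = "my_dataset"):
    """Hedged sketch: print events from /api/stream/variables as they arrive."""
    import urllib.request

    url = (
        "http://localhost:5050/api/stream/variables"
        f"?dataset_id={dataset_id}&interval=1.0"
    )
    with urllib.request.urlopen(url) as response:
        for raw_line in response:
            line = raw_line.decode("utf-8").strip()
            if line.startswith("data: "):
                event = json.loads(line[len("data: "):])
                print(event.get("type"), event.get("values", ""))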


@app.route("/api/stream/status", methods=["GET"])
def stream_status():
    """Stream application status in real-time using Server-Sent Events"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    # Get request parameters outside the generator
    interval = float(request.args.get("interval", 2.0))  # Default 2 seconds

    def generate():
        """Generate SSE status stream"""
        last_status = None

        # Send initial connection message
        yield f"data: {json.dumps({'type': 'connected', 'message': 'Status stream connected'})}\n\n"

        while True:
            try:
                # Get current status
                current_status = streamer.get_status()

                # Only send if status changed
                if current_status != last_status:
                    data = {
                        "type": "status",
                        "status": current_status,
                        "timestamp": datetime.now().isoformat(),
                    }
                    yield f"data: {json.dumps(data)}\n\n"
                    last_status = current_status

                time.sleep(interval)

            except Exception as e:
                error_data = {
                    "type": "error",
                    "message": f"Status stream error: {str(e)}",
                    "timestamp": datetime.now().isoformat(),
                }
                yield f"data: {json.dumps(error_data)}\n\n"
                time.sleep(interval)

    return Response(
        generate(),
        mimetype="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "Cache-Control",
        },
    )


def graceful_shutdown():
    """Perform graceful shutdown"""
    print("\n⏹️ Performing graceful shutdown...")
    try:
        if streamer is not None:
            streamer.stop_streaming()
            streamer.disconnect_plc()
            streamer.release_instance_lock()
        print("✅ Shutdown completed successfully")
    except Exception as e:
        print(f"⚠️ Error during shutdown: {e}")


def main():
    """Main application entry point with error handling and recovery"""
    max_retries = 3
    retry_count = 0

    while retry_count < max_retries:
        try:
            print("🚀 Starting Flask server for PLC S7-315 Streamer")
            print("📊 Web interface available at: http://localhost:5050")
            print("🔧 Configure your PLC and variables through the web interface")

            # The streamer instance is created in the __main__ block below;
            # its constructor handles instance locking and auto-recovery.
            global streamer

            # Start Flask application
            app.run(debug=False, host="0.0.0.0", port=5050, use_reloader=False)

            # If we reach here, the server stopped normally
            break

        except RuntimeError as e:
            if "Another instance" in str(e):
                print(f"❌ {e}")
                print("💡 Tip: Stop the other instance or wait for it to finish")
                sys.exit(1)
            else:
                print(f"⚠️ Runtime error: {e}")
                retry_count += 1

        except KeyboardInterrupt:
            print("\n⏸️ Received interrupt signal...")
            graceful_shutdown()
            break

        except Exception as e:
            print(f"💥 Unexpected error: {e}")
            retry_count += 1

        if retry_count < max_retries:
            print(f"🔄 Attempting restart ({retry_count}/{max_retries})...")
            time.sleep(2)  # Wait before retry
        else:
            print("❌ Maximum retries reached. Exiting...")
            graceful_shutdown()
            sys.exit(1)


# ==============================================================================
# Symbol Management API Endpoints
# ==============================================================================


@app.route("/api/utils/browse-file", methods=["POST"])
def browse_file():
    """Open file dialog to browse for ASC symbol files."""
    try:
        if not TKINTER_AVAILABLE:
            return (
                jsonify(
                    {
                        "success": False,
                        "error": "File browser not available. Please enter the file path manually.",
                    }
                ),
                400,
            )

        data = request.get_json(silent=True) or {}
        title = data.get("title", "Select File")
        filetypes = data.get("filetypes", [["All Files", "*.*"]])

        # Create a temporary tkinter root window
        root = tk.Tk()
        root.withdraw()  # Hide the root window
        root.attributes("-topmost", True)  # Bring to front

        # Open file dialog
        file_path = filedialog.askopenfilename(title=title, filetypes=filetypes)

        root.destroy()  # Clean up

        if file_path:
            return jsonify({"success": True, "file_path": file_path})
        else:
            return jsonify({"success": True, "cancelled": True})

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/symbols/load", methods=["POST"])
def load_symbols():
    """Load symbols from ASC file and save to JSON."""
    try:
        data = request.get_json(silent=True) or {}
        asc_file_path = data.get("asc_file_path")

        if not asc_file_path:
            return (
                jsonify({"success": False, "error": "ASC file path is required"}),
                400,
            )

        if not os.path.exists(asc_file_path):
            return (
                jsonify(
                    {"success": False, "error": f"ASC file not found: {asc_file_path}"}
                ),
                404,
            )

        # Initialize symbol loader with optional logger
        logger = None
        try:
            logger = (
                streamer.event_logger if "streamer" in globals() and streamer else None
            )
        except Exception:
            pass  # Use None logger if streamer is not available

        symbol_loader = SymbolLoader(logger)

        # Load symbols and save to JSON
        json_output_path = project_path("config", "data", "plc_symbols.json")

        # Ensure the directory exists
        os.makedirs(os.path.dirname(json_output_path), exist_ok=True)

        symbols_count = symbol_loader.load_asc_and_save_json(
            asc_file_path, json_output_path
        )

        # Log the success
        if logger:
            logger.log_event(
                "info",
                "symbols_loaded",
                f"Loaded {symbols_count} symbols from {asc_file_path}",
            )

        return jsonify(
            {
                "success": True,
                "symbols_count": symbols_count,
                "json_path": json_output_path,
            }
        )

    except Exception as e:
        error_msg = f"Error loading symbols: {str(e)}"
        print(f"DEBUG - Symbol loading error: {error_msg}")  # Debug print
        print(f"DEBUG - Exception type: {type(e).__name__}")

        # Print full stack trace
        import traceback

        print("DEBUG - Full stack trace:")
        traceback.print_exc()

        # Log the error
        if "streamer" in globals() and streamer and streamer.event_logger:
            streamer.event_logger.log_event("error", "symbols_load_failed", error_msg)

        return jsonify({"success": False, "error": error_msg}), 500


@app.route("/api/symbols", methods=["GET"])
def get_symbols():
    """Get loaded symbols from JSON file."""
    try:
        json_path = project_path("config", "data", "plc_symbols.json")

        if not os.path.exists(json_path):
            return jsonify({"success": True, "symbols": [], "total_count": 0})

        with open(json_path, "r", encoding="utf-8") as file:
            symbols_data = json.load(file)

        return jsonify({"success": True, **symbols_data})

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/symbols/search", methods=["POST"])
def search_symbols():
    """Search symbols by name or description."""
    try:
        data = request.get_json(silent=True) or {}
        query = data.get("query", "").lower()
        limit = data.get("limit", 50)

        json_path = project_path("config", "data", "plc_symbols.json")

        if not os.path.exists(json_path):
            return jsonify({"success": True, "symbols": [], "total_count": 0})

        with open(json_path, "r", encoding="utf-8") as file:
            symbols_data = json.load(file)

        all_symbols = symbols_data.get("symbols", [])

        if not query:
            # Return first N symbols if no query
            filtered_symbols = all_symbols[:limit]
        else:
            # Search in name and description
            filtered_symbols = []
            for symbol in all_symbols:
                if (
                    query in symbol.get("name", "").lower()
                    or query in symbol.get("description", "").lower()
                ):
                    filtered_symbols.append(symbol)

                if len(filtered_symbols) >= limit:
                    break

        return jsonify(
            {
                "success": True,
                "symbols": filtered_symbols,
                "total_count": len(filtered_symbols),
                "query": query,
            }
        )

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/symbols/process-variables", methods=["POST"])
def process_symbol_variables():
    """Process dataset variables, expanding symbol-based ones."""
    try:
        data = request.get_json(silent=True) or {}
        variables = data.get("variables", [])

        if not variables:
            return (
                jsonify({"success": False, "error": "Variables array is required"}),
                400,
            )

        # Initialize symbol processor with optional logger
        logger = None
        try:
            logger = (
                streamer.event_logger if "streamer" in globals() and streamer else None
            )
        except Exception:
            pass  # Use None logger if streamer is not available

        symbol_processor = SymbolProcessor(logger)

        # Get symbols path
        symbols_path = project_path("config", "data", "plc_symbols.json")

        if not os.path.exists(symbols_path):
            return (
                jsonify(
                    {
                        "success": False,
                        "error": "No symbols file found. Please load an ASC file first.",
                    }
                ),
                404,
            )

        # Process variables
        processed_variables = symbol_processor.process_dataset_variables(
            variables, symbols_path
        )

        # Validate the processed variables
        validation = symbol_processor.validate_symbol_variables(variables, symbols_path)

        return jsonify(
            {
                "success": True,
                "processed_variables": processed_variables,
                "validation": validation,
            }
        )

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


if __name__ == "__main__":
    try:
        # Initialize streamer instance
        streamer = PLCDataStreamer()
        main()
    except Exception as e:
        print(f"💥 Critical error during initialization: {e}")
        sys.exit(1)