from flask import (
    Flask,
    request,
    jsonify,
    send_from_directory,
    Response,
)
from flask_cors import CORS
import json
import time
import signal
import sys
import requests  # For HTTP health checks
from datetime import datetime, timedelta, timezone
import os
import logging

# 📝 ROTATING LOGGER SYSTEM
from core.rotating_logger import setup_backend_logging

# OPTIMIZATION CONFIGURATION
# Control PLC batch reading optimization globally
# Set to False to use individual reads (compatibility mode) - RECOMMENDED for small datasets
# Set to True to use optimized batch reads with read_multi_vars (only for large datasets > 20 vars)
USE_OPTIMIZED_BATCH_READING = False
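

# Illustrative sketch (hypothetical helper, never called by the application): how a
# reader could honour USE_OPTIMIZED_BATCH_READING. `read_one` and `read_many` stand
# in for whatever read primitives the PLC client actually exposes.
def _example_read_dispatch(variables, read_one, read_many):
    """Return {name: value}, batching only when the flag and dataset size justify it."""
    if USE_OPTIMIZED_BATCH_READING and len(variables) > 20:
        # Single round-trip for the whole variable set (e.g. a multi-var read).
        return read_many(variables)
    # Compatibility mode: one request per variable.
    return {name: read_one(config) for name, config in variables.items()}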

# 📝 Setup rotating logger system
# This will log to both console and .logs/ directory with automatic rotation
backend_logger = setup_backend_logging(
    log_dir=".logs",  # Directory for log files
    max_lines=10000,  # Max lines per file
    max_files=30,  # Max files to retain
)

# Configure standard logging to use our rotating system
# Reduce Flask's request logging to ERROR level only (keep logs clean)
logging.getLogger("werkzeug").setLevel(logging.ERROR)

try:
    import tkinter as tk
    from tkinter import filedialog

    TKINTER_AVAILABLE = True
except ImportError:
    TKINTER_AVAILABLE = False
    print("Warning: tkinter not available. File browse functionality will be limited.")

# System Tray Icon imports
try:
    import pystray
    from PIL import Image
    import threading

    TRAY_AVAILABLE = True
except ImportError:
    TRAY_AVAILABLE = False
    print(
        "Warning: pystray/PIL not available. System tray functionality will be disabled."
    )

# Import core modules
from core import PLCDataStreamer
from core.historical_cache import HistoricalDataCache
from utils.json_manager import JSONManager, SchemaManager
from utils.symbol_loader import SymbolLoader
from utils.symbol_processor import SymbolProcessor
from utils.instance_manager import InstanceManager


def check_backend_instance_robust(
    port: int = 5050, lock_file: str = "plc_streamer.lock"
):
    """
    🔒 ROBUST INSTANCE CHECK - HTTP + PID based verification

    This function provides a more reliable way to detect existing backend instances:
    1. Double HTTP health check with 5-second interval
    2. PID verification and zombie process cleanup
    3. Automatic lock file management

    Args:
        port: Backend server port (default: 5050)
        lock_file: Lock file path (default: "plc_streamer.lock")

    Returns:
        Tuple[bool, str]: (can_proceed, message)
        - can_proceed: True if this instance can start safely
        - message: Detailed status message
    """
    print("🔍 Starting robust backend instance verification...")

    try:
        # Initialize instance manager
        instance_manager = InstanceManager(port=port, lock_file=lock_file)

        # Perform comprehensive instance check
        can_proceed, message = instance_manager.check_and_handle_existing_instance()

        if can_proceed:
            print(f"✅ {message}")
            print("🔒 Initializing new backend instance...")

            # Create lock file for this instance
            if not instance_manager.initialize_instance():
                return False, "❌ Failed to create instance lock file"

            return True, "✅ Backend instance ready to start"
        else:
            print(f"🚫 {message}")
            return False, message

    except Exception as e:
        error_msg = f"❌ Error during instance verification: {e}"
        print(error_msg)
        return False, error_msg
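

# Illustrative sketch (hypothetical, not wired into the real entry point): typical
# startup gating using the (can_proceed, message) tuple returned above.
def _example_guarded_startup():
    can_proceed, message = check_backend_instance_robust(port=5050)
    if not can_proceed:
        print(f"Refusing to start a second backend instance: {message}")
        sys.exit(1)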


app = Flask(__name__)
CORS(
    app,
    resources={
        r"/api/*": {
            "origins": [
                "http://localhost:5173",
                "http://127.0.0.1:5173",
                "*",
            ]
        }
    },
)
app.secret_key = "plc_streamer_secret_key"


def resource_path(relative_path):
    """Get absolute path to resource, works for dev and for PyInstaller"""
    try:
        # PyInstaller creates a temp folder and stores path in _MEIPASS
        base_path = sys._MEIPASS
    except Exception:
        # Not running in a bundle
        base_path = os.path.abspath(".")

    return os.path.join(base_path, relative_path)


def external_path(relative_path):
    """Get path external to PyInstaller bundle (for persistent files like logs, state, etc.)"""
    if getattr(sys, "frozen", False):
        # Running as PyInstaller executable - use directory next to exe
        executable_dir = os.path.dirname(sys.executable)
        return os.path.join(executable_dir, relative_path)
    else:
        # Running as script - use current directory
        return os.path.join(os.path.abspath("."), relative_path)


def project_path(*parts: str) -> str:
    """Build absolute path from the project root (based on this file)."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    return os.path.join(base_dir, *parts)


def get_records_directory():
    """Get the correct records directory path for both development and PyInstaller exe"""
    if getattr(sys, "frozen", False):
        # Running as PyInstaller executable - records should be next to the exe
        executable_dir = os.path.dirname(sys.executable)
        return os.path.join(executable_dir, "records")
    else:
        # Running as script - use current directory
        return os.path.join(os.path.dirname(__file__), "records")
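

# Note (illustrative, hypothetical paths): the helpers above resolve differently once
# the app is frozen with PyInstaller:
#   resource_path("frontend/dist")  -> <_MEIPASS>/frontend/dist  (bundled, read-only)
#   external_path(".logs")          -> <exe dir>/.logs           (persistent, next to the exe)
#   get_records_directory()         -> <exe dir>/records         (persistent CSV output)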


# React build directory (for Vite production build)
# Use resource_path to handle both development and PyInstaller scenarios
REACT_DIST_DIR = resource_path(os.path.join("frontend", "dist"))


# Global instances
streamer = None
historical_cache = None
json_manager = JSONManager()
schema_manager = SchemaManager()


def check_streamer_initialized():
    """Check if streamer is initialized, return error response if not"""
    if streamer is None:
        return jsonify({"error": "Application not initialized"}), 503
    return None


@app.route("/images/<filename>")
def serve_image(filename):
    """Serve images from .images directory"""
    return send_from_directory(".images", filename)


@app.route("/favicon.ico")
def serve_favicon():
    """Serve application favicon from React public folder.

    Priority:
    1) frontend/public/favicon.ico
    2) frontend/public/record.png
    """
    # Use resource_path for files embedded in PyInstaller bundle
    public_dir = resource_path(os.path.join("frontend", "public"))
    public_favicon = os.path.join(public_dir, "favicon.ico")
    public_record = os.path.join(public_dir, "record.png")

    if os.path.exists(public_favicon):
        return send_from_directory(public_dir, "favicon.ico")
    if os.path.exists(public_record):
        return send_from_directory(public_dir, "record.png")

    # Final fallback: 404
    return Response("Favicon not found", status=404, mimetype="text/plain")


@app.route("/record.png")
def serve_public_record_png():
    """Serve /record.png from the React public folder."""
    public_dir = resource_path(os.path.join("frontend", "public"))
    public_record = os.path.join(public_dir, "record.png")

    if os.path.exists(public_record):
        return send_from_directory(public_dir, "record.png")

    return Response("record.png not found", status=404, mimetype="text/plain")


@app.route("/SIDEL.png")
def serve_public_sidel_png():
    """Serve /SIDEL.png from the React public folder."""
    public_dir = resource_path(os.path.join("frontend", "public"))
    sidel_file = os.path.join(public_dir, "SIDEL.png")
    if os.path.exists(sidel_file):
        return send_from_directory(public_dir, "SIDEL.png")
    return Response("SIDEL.png not found", status=404, mimetype="text/plain")


# ==============================
# Frontend (React SPA)
# ==============================


@app.route("/assets/<path:filename>")
def serve_react_assets(filename):
    """Serve built React assets from Vite (production)."""
    assets_dir = os.path.join(REACT_DIST_DIR, "assets")
    return send_from_directory(assets_dir, filename)


@app.route("/")
@app.route("/app")
@app.route("/app/<path:path>")
def serve_react_index(path: str = ""):
    """Serve React SPA index (expects Vite build at frontend/dist)."""
    index_path = os.path.join(REACT_DIST_DIR, "index.html")
    if not os.path.exists(index_path):
        return Response(
            "React build not found. Run 'cd frontend && npm install && npm run build' first.",
            status=500,
            mimetype="text/plain",
        )
    return send_from_directory(REACT_DIST_DIR, "index.html")


## Legacy UI removed after migration to React


# ==============================
# Unified JSON Configuration API
# ==============================


@app.route("/api/config/schemas", methods=["GET"])
def list_config_schemas():
    """List all available configuration schemas."""
    try:
        schemas = schema_manager.list_available_schemas()
        return jsonify({"success": True, "schemas": schemas})
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/config/schema/<schema_id>", methods=["GET"])
def get_config_schema(schema_id):
    """Get a specific JSON schema with optional UI schema."""
    try:
        # Get main schema
        schema = schema_manager.get_schema(schema_id)
        if not schema:
            return (
                jsonify({"success": False, "error": f"Schema '{schema_id}' not found"}),
                404,
            )

        # Get optional UI schema
        ui_schema = schema_manager.get_ui_schema(schema_id)

        response = {"success": True, "schema": schema}
        if ui_schema:
            response["ui_schema"] = ui_schema

        return jsonify(response)

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/config/<config_id>", methods=["GET"])
def read_config(config_id):
    """Read configuration data from JSON file."""
    try:
        data = json_manager.read_json(config_id)
        return jsonify({"success": True, "data": data})
    except ValueError as e:
        return jsonify({"success": False, "error": str(e)}), 400
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/config/<config_id>", methods=["PUT"])
def write_config(config_id):
    """Write configuration data to JSON file."""
    try:
        payload = request.get_json(force=True, silent=False)
        if not payload:
            return jsonify({"success": False, "error": "No JSON data provided"}), 400

        # Write the data
        json_manager.write_json(config_id, payload)

        # Automatically reload backend configuration for specific config types
        if streamer:
            try:
                if config_id == "plc":
                    streamer.config_manager.load_configuration()
                elif config_id in ["dataset-definitions", "dataset-variables"]:
                    # Reload dataset configuration to pick up changes
                    streamer.reload_dataset_configuration()

                    if streamer.logger:
                        streamer.logger.info(
                            f"Auto-reloaded backend configuration after updating {config_id}"
                        )
                elif config_id in ["plot-definitions", "plot-variables"]:
                    # Plot configurations don't need backend reload currently
                    pass
            except Exception as e:
                # Log the error but don't fail the save operation
                if streamer and streamer.logger:
                    streamer.logger.warning(
                        f"Could not auto-reload {config_id} config in backend: {e}"
                    )
                else:
                    print(
                        f"Warning: Could not auto-reload {config_id} config in backend: {e}"
                    )

        return jsonify(
            {
                "success": True,
                "message": f"Configuration '{config_id}' saved successfully",
            }
        )

    except ValueError as e:
        return jsonify({"success": False, "error": str(e)}), 400
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
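

# Illustrative sketch (hypothetical host and payload, never called by the backend):
# how a client persists a configuration document through the unified JSON API.
def _example_write_config_client():
    payload = {"ip": "10.1.33.11", "rack": 0, "slot": 2}
    resp = requests.put(
        "http://127.0.0.1:5050/api/config/plc", json=payload, timeout=5
    )
    resp.raise_for_status()
    return resp.json()  # e.g. {"success": True, "message": "..."}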


@app.route("/api/config/<config_id>/export", methods=["GET"])
def export_config(config_id):
    """Export configuration as downloadable JSON file."""
    try:
        data = json_manager.read_json(config_id)

        # Prepare download response
        content = json.dumps(data, indent=2, ensure_ascii=False)
        filename = f"{config_id}_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"

        response = Response(content, mimetype="application/json")
        response.headers["Content-Disposition"] = f"attachment; filename={filename}"
        return response

    except ValueError as e:
        return jsonify({"success": False, "error": str(e)}), 400
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/config/<config_id>/reload", methods=["POST"])
def reload_config(config_id):
    """Notify backend to reload configuration from JSON files."""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        if config_id == "plc":
            streamer.config_manager.load_configuration()
        elif config_id in ["dataset-definitions", "dataset-variables"]:
            # Reload dataset configuration using the new method
            streamer.reload_dataset_configuration()
        elif config_id in ["plot-definitions", "plot-variables"]:
            # Reload plot configuration if needed
            pass

        return jsonify(
            {
                "success": True,
                "message": f"Configuration '{config_id}' reloaded successfully",
            }
        )

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


@app.route("/api/config/dataset-variables/expanded", methods=["GET"])
def get_expanded_dataset_variables():
    """Get dataset variables with symbolic variables expanded."""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        # Get the expanded variables from the streamer's config manager
        expanded_data = {"variables": []}

        if (
            hasattr(streamer.config_manager, "datasets")
            and streamer.config_manager.datasets
        ):
            for dataset_id, dataset_config in streamer.config_manager.datasets.items():
                if "variables" in dataset_config and dataset_config["variables"]:
                    expanded_data["variables"].append(
                        {
                            "dataset_id": dataset_id,
                            "variables": dataset_config["variables"],
                        }
                    )

        return jsonify({"success": True, "data": expanded_data})

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500


# ==============================
# Operational API (PLC Control, Streaming, etc.)
# ==============================


@app.route("/api/plc/config", methods=["POST"])
def update_plc_config():
    """Update PLC configuration"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        ip = data.get("ip", "10.1.33.11")
        rack = int(data.get("rack", 0))
        slot = int(data.get("slot", 2))

        streamer.update_plc_config(ip, rack, slot)
        return jsonify({"success": True, "message": "PLC configuration updated"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/udp/config", methods=["POST"])
def update_udp_config():
    """Update UDP configuration"""
    try:
        data = request.get_json()
        host = data.get("host", "127.0.0.1")
        port = int(data.get("port", 9870))
        sampling_interval = data.get("sampling_interval")  # Optional, can be None

        streamer.update_udp_config(host, port, sampling_interval)
        return jsonify({"success": True, "message": "UDP configuration updated"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/csv/config", methods=["GET"])
def get_csv_config():
    """Get CSV recording configuration"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        csv_config = streamer.config_manager.csv_config.copy()

        # Add current directory information
        current_dir = streamer.config_manager.get_csv_directory_path()
        csv_config["current_directory"] = os.path.abspath(current_dir)
        csv_config["directory_exists"] = os.path.exists(current_dir)

        # Add disk space info
        disk_info = streamer.get_disk_space_info()
        if disk_info:
            csv_config["disk_space"] = disk_info

        return jsonify({"success": True, "config": csv_config})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 500


@app.route("/api/csv/config", methods=["POST"])
def update_csv_config():
    """Update CSV recording configuration"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()

        # Extract valid configuration parameters
        config_updates = {}
        valid_params = {
            "records_directory",
            "rotation_enabled",
            "max_size_mb",
            "max_days",
            "max_hours",
            "cleanup_interval_hours",
        }

        for param in valid_params:
            if param in data:
                config_updates[param] = data[param]

        if not config_updates:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "No valid configuration parameters provided",
                    }
                ),
                400,
            )

        # Update configuration
        result = streamer.config_manager.update_csv_config(**config_updates)

        return jsonify(
            {
                "success": True,
                "message": "CSV configuration updated successfully",
                "old_config": result["old_config"],
                "new_config": result["new_config"],
            }
        )

    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 400
    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 500
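

# Illustrative example body for POST /api/csv/config (keys come from valid_params
# above; the values shown are hypothetical):
#   {"rotation_enabled": true, "max_size_mb": 500, "max_days": 7, "cleanup_interval_hours": 24}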


@app.route("/api/csv/cleanup", methods=["POST"])
def trigger_csv_cleanup():
    """🔑 BACKGROUND: Manually trigger CSV cleanup with recording protection"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        # 🔑 PROTECTED: Perform cleanup with recording protection
        streamer.perform_csv_cleanup_safe()
        return jsonify(
            {
                "success": True,
                "message": "CSV cleanup queued in background (recording protection active)",
            }
        )

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 500


@app.route("/api/csv/directory/info", methods=["GET"])
def get_csv_directory_info():
    """Get information about CSV directory and files"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        base_dir = streamer.config_manager.get_csv_directory_path()

        info = {
            "base_directory": os.path.abspath(base_dir),
            "directory_exists": os.path.exists(base_dir),
            "total_files": 0,
            "total_size_mb": 0,
            "oldest_file": None,
            "newest_file": None,
            "day_folders": [],
        }

        if os.path.exists(base_dir):
            total_size = 0
            oldest_time = None
            newest_time = None

            for day_folder in os.listdir(base_dir):
                day_path = os.path.join(base_dir, day_folder)
                if os.path.isdir(day_path):
                    day_info = {"name": day_folder, "files": 0, "size_mb": 0}

                    for file_name in os.listdir(day_path):
                        if file_name.endswith(".csv"):
                            file_path = os.path.join(day_path, file_name)
                            if os.path.isfile(file_path):
                                stat = os.stat(file_path)
                                file_size = stat.st_size
                                file_time = stat.st_mtime

                                info["total_files"] += 1
                                day_info["files"] += 1
                                total_size += file_size
                                day_info["size_mb"] += file_size / (1024 * 1024)

                                if oldest_time is None or file_time < oldest_time:
                                    oldest_time = file_time
                                    info["oldest_file"] = datetime.fromtimestamp(
                                        file_time
                                    ).isoformat()

                                if newest_time is None or file_time > newest_time:
                                    newest_time = file_time
                                    info["newest_file"] = datetime.fromtimestamp(
                                        file_time
                                    ).isoformat()

                    day_info["size_mb"] = round(day_info["size_mb"], 2)
                    info["day_folders"].append(day_info)

            info["total_size_mb"] = round(total_size / (1024 * 1024), 2)
            info["day_folders"].sort(key=lambda x: x["name"], reverse=True)

        return jsonify({"success": True, "info": info})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 500


@app.route("/api/plc/connect", methods=["POST"])
def connect_plc():
    """Connect to PLC"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.connect_plc():
        return jsonify({"success": True, "message": "Connected to PLC"})
    else:
        # Extract error detail if available
        last_error = None
        try:
            last_error = streamer.plc_client.last_error
        except Exception:
            last_error = None
        return (
            jsonify(
                {
                    "success": False,
                    "message": "Error connecting to PLC",
                    "error": last_error,
                    "plc_config": streamer.config_manager.plc_config,
                }
            ),
            500,
        )


@app.route("/api/plc/disconnect", methods=["POST"])
def disconnect_plc():
    """Disconnect from PLC"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.disconnect_plc(manual_disconnect=True)  # User manually disconnected
    # Return updated status to ensure frontend gets correct state
    return jsonify(
        {
            "success": True,
            "message": "Disconnected from PLC",
            "status": {
                "plc_connected": False,
                "streaming": False,
                "csv_recording": False,
            },
        }
    )


@app.route("/api/variables", methods=["POST"])
def add_variable():
    """Add a new variable"""
    try:
        data = request.get_json()
        name = data.get("name")
        area = data.get("area")
        db = int(data.get("db"))
        offset = int(data.get("offset"))
        var_type = data.get("type")
        bit = data.get("bit")  # Added bit parameter

        valid_types = [
            "real",
            "int",
            "bool",
            "dint",
            "word",
            "byte",
            "uint",
            "udint",
            "sint",
            "usint",
        ]
        valid_areas = ["db", "mw", "m", "pew", "pe", "paw", "pa", "e", "a", "mb"]

        if (
            not name
            or not area
            or var_type not in valid_types
            or area.lower() not in valid_areas
        ):
            return jsonify({"success": False, "message": "Invalid data"}), 400

        if area.lower() in ["e", "a", "mb"] and bit is None:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Bit position must be specified for bit areas",
                    }
                ),
                400,
            )

        if area.lower() in ["e", "a", "mb"] and (bit < 0 or bit > 7):
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Bit position must be between 0 and 7",
                    }
                ),
                400,
            )

        streamer.add_variable(name, area, db, offset, var_type, bit)
        return jsonify({"success": True, "message": f"Variable {name} added"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/variables/<name>", methods=["DELETE"])
def remove_variable(name):
    """Remove a variable"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.remove_variable(name)
    return jsonify({"success": True, "message": f"Variable {name} removed"})


@app.route("/api/variables/<name>", methods=["GET"])
def get_variable(name):
    """Get a specific variable configuration from current dataset"""
    try:
        if not streamer.current_dataset_id:
            return (
                jsonify({"success": False, "message": "No dataset selected"}),
                400,
            )

        current_variables = streamer.get_dataset_variables(streamer.current_dataset_id)
        if name not in current_variables:
            return (
                jsonify({"success": False, "message": f"Variable {name} not found"}),
                404,
            )

        var_config = current_variables[name].copy()
        var_config["name"] = name

        # Check if variable is in streaming list for current dataset
        streaming_vars = streamer.datasets[streamer.current_dataset_id].get(
            "streaming_variables", []
        )
        var_config["streaming"] = name in streaming_vars

        return jsonify({"success": True, "variable": var_config})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/variables/<name>", methods=["PUT"])
def update_variable(name):
    """Update an existing variable in current dataset"""
    try:
        if not streamer.current_dataset_id:
            return jsonify({"success": False, "message": "No dataset selected"}), 400

        data = request.get_json()
        new_name = data.get("name", name)
        area = data.get("area")
        db = int(data.get("db", 1))
        offset = int(data.get("offset"))
        var_type = data.get("type")
        bit = data.get("bit")

        valid_types = [
            "real",
            "int",
            "bool",
            "dint",
            "word",
            "byte",
            "uint",
            "udint",
            "sint",
            "usint",
        ]
        valid_areas = ["db", "mw", "m", "pew", "pe", "paw", "pa", "e", "a", "mb"]

        if (
            not new_name
            or not area
            or var_type not in valid_types
            or area.lower() not in valid_areas
        ):
            return jsonify({"success": False, "message": "Invalid data"}), 400

        if area.lower() in ["e", "a", "mb"] and bit is None:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Bit position must be specified for bit areas",
                    }
                ),
                400,
            )

        if area.lower() in ["e", "a", "mb"] and (bit < 0 or bit > 7):
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Bit position must be between 0 and 7",
                    }
                ),
                400,
            )

        current_dataset_id = streamer.current_dataset_id
        current_variables = streamer.get_dataset_variables(current_dataset_id)

        # Check if variable exists in current dataset
        if name not in current_variables:
            return (
                jsonify({"success": False, "message": f"Variable {name} not found"}),
                404,
            )

        # Check if new name already exists (if name is changing)
        if name != new_name and new_name in current_variables:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": f"Variable {new_name} already exists",
                    }
                ),
                400,
            )

        # Preserve streaming state
        streaming_vars = streamer.datasets[current_dataset_id].get(
            "streaming_variables", []
        )
        was_streaming = name in streaming_vars

        # Remove old variable
        streamer.remove_variable_from_dataset(current_dataset_id, name)

        # Add updated variable
        streamer.add_variable_to_dataset(
            current_dataset_id, new_name, area, db, offset, var_type, bit, was_streaming
        )

        return jsonify({"success": True, "message": "Variable updated successfully"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/variables/<name>/streaming", methods=["POST"])
def toggle_variable_streaming(name):
    """Toggle streaming for a specific variable in current dataset"""
    try:
        if not streamer.current_dataset_id:
            return jsonify({"success": False, "message": "No dataset selected"}), 400

        data = request.get_json()
        enabled = data.get("enabled", False)

        # Use the new dataset-specific method
        streamer.toggle_variable_streaming(streamer.current_dataset_id, name, enabled)

        status = "enabled" if enabled else "disabled"
        return jsonify(
            {"success": True, "message": f"Variable {name} streaming {status}"}
        )

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/variables/streaming", methods=["GET"])
def get_streaming_variables():
    """Get list of variables enabled for streaming in current dataset"""
    if streamer is None:
        return jsonify({"success": False, "error": "Streamer not initialized"}), 503

    # Get streaming variables from current dataset
    streaming_vars = []
    if streamer.current_dataset_id and streamer.current_dataset_id in streamer.datasets:
        streaming_vars = streamer.datasets[streamer.current_dataset_id].get(
            "streaming_variables", []
        )

    return jsonify({"success": True, "streaming_variables": streaming_vars})


@app.route("/api/datasets/<dataset_id>/variables/values", methods=["GET"])
def get_dataset_variable_values(dataset_id):
    """🔑 RATE LIMITED: Get current values of all variables in a dataset - CACHE ONLY with recording protection"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        # Check if dataset exists
        if dataset_id not in streamer.datasets:
            return (
                jsonify(
                    {"success": False, "message": f"Dataset '{dataset_id}' not found"}
                ),
                404,
            )

        # Get dataset variables
        dataset_variables = streamer.get_dataset_variables(dataset_id)

        if not dataset_variables:
            return jsonify(
                {
                    "success": True,
                    "message": "No variables defined in this dataset",
                    "values": {},
                    "source": "no_variables",
                }
            )

        # Check if dataset is active (required for cache to be populated)
        if dataset_id not in streamer.active_datasets:
            return jsonify(
                {
                    "success": False,
                    "message": f"Dataset '{dataset_id}' is not active. Activate the dataset to start reading variables and populate cache.",
                    "error_type": "dataset_inactive",
                    "values": {},
                    "detailed_errors": {},
                    "stats": {
                        "success": 0,
                        "failed": 0,
                        "total": len(dataset_variables),
                    },
                    "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "source": "no_cache_dataset_inactive",
                    "is_cached": False,
                }
            )

        # Check if PLC is connected (required for streaming to populate cache)
        if not streamer.plc_client.is_connected():
            return jsonify(
                {
                    "success": False,
                    "message": "PLC not connected. Connect to PLC and activate dataset to populate cache.",
                    "error_type": "plc_disconnected",
                    "values": {},
                    "detailed_errors": {},
                    "stats": {
                        "success": 0,
                        "failed": 0,
                        "total": len(dataset_variables),
                    },
                    "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "source": "no_cache_plc_disconnected",
                    "is_cached": False,
                }
            )

        # Check if cached values are available
        if not streamer.has_cached_values(dataset_id):
            return jsonify(
                {
                    "success": False,
                    "message": f"No cached values available for dataset '{dataset_id}'. Cache is populated by the streaming process at the dataset's configured interval. Please wait for the next reading cycle.",
                    "error_type": "no_cache_available",
                    "values": {},
                    "detailed_errors": {},
                    "stats": {
                        "success": 0,
                        "failed": 0,
                        "total": len(dataset_variables),
                    },
                    "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "source": "no_cache_available",
                    "is_cached": False,
                    "note": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
                }
            )

        # 🔑 PROTECTED: Get cached values with rate limiting and recording protection
        read_result = streamer.get_cached_dataset_values_safe(dataset_id)

        # Handle rate limiting response
        if (
            not read_result.get("success", False)
            and read_result.get("error_type") == "rate_limited"
        ):
            return jsonify(read_result), 429  # Too Many Requests

        # Handle API timeout response
        if (
            not read_result.get("success", False)
            and read_result.get("error_type") == "api_timeout"
        ):
            return jsonify(read_result), 503  # Service Unavailable

        # Convert timestamp from ISO format to readable format for consistency
        if read_result.get("timestamp"):
            try:
                cached_timestamp = datetime.fromisoformat(read_result["timestamp"])
                read_result["timestamp"] = cached_timestamp.strftime(
                    "%Y-%m-%d %H:%M:%S"
                )
            except Exception:
                pass  # Keep original timestamp if conversion fails

        # Extract values and handle diagnostics
        if not read_result.get("success", False):
            # Complete failure case
            error_msg = read_result.get("error", "Unknown error reading variables")
            error_type = read_result.get("error_type", "unknown")

            # Log detailed error information
            if streamer.logger:
                streamer.logger.error(
                    f"Cached values indicate failure for dataset '{dataset_id}': {error_msg}"
                )
                if read_result.get("errors"):
                    for var_name, var_error in read_result["errors"].items():
                        streamer.logger.error(f" Variable '{var_name}': {var_error}")

            return (
                jsonify(
                    {
                        "success": False,
                        "message": error_msg,
                        "error_type": error_type,
                        "values": {},
                        "detailed_errors": read_result.get("errors", {}),
                        "stats": read_result.get("stats", {}),
                        "timestamp": read_result.get(
                            "timestamp", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                        ),
                        "source": "cache",
                        "is_cached": True,
                    }
                ),
                500,
            )

        # Success or partial success case
        raw_values = read_result.get("values", {})
        variable_errors = read_result.get("errors", {})
        stats = read_result.get("stats", {})

        # Format values for display
        formatted_values = {}
        error_details = {}

        for var_name, value in raw_values.items():
            if value is not None:
                var_config = dataset_variables[var_name]
                var_type = var_config.get("type", "unknown")

                # Format value based on type
                try:
                    if var_type == "real":
                        formatted_values[var_name] = (
                            f"{value:.3f}"
                            if isinstance(value, (int, float))
                            else str(value)
                        )
                    elif var_type == "bool":
                        formatted_values[var_name] = "TRUE" if value else "FALSE"
                    elif var_type in [
                        "int",
                        "uint",
                        "dint",
                        "udint",
                        "word",
                        "byte",
                        "sint",
                        "usint",
                    ]:
                        formatted_values[var_name] = (
                            str(int(value))
                            if isinstance(value, (int, float))
                            else str(value)
                        )
                    else:
                        formatted_values[var_name] = str(value)
                except Exception as format_error:
                    formatted_values[var_name] = "FORMAT_ERROR"
                    error_details[var_name] = f"Format error: {str(format_error)}"
            else:
                # Variable had an error - get the specific error message
                specific_error = variable_errors.get(var_name, "Unknown error")
                formatted_values[var_name] = "ERROR"
                error_details[var_name] = specific_error

        # Prepare response message
        total_vars = stats.get("total", len(dataset_variables))
        success_vars = stats.get("success", 0)
        failed_vars = stats.get("failed", 0)

        # Determine data source for message (always cache now)
        data_source = "cache"
        source_text = " (from streaming cache)"

        if failed_vars == 0:
            message = (
                f"Successfully displaying all {success_vars} variables{source_text}"
            )
            response_success = True
        else:
            message = f"Partial data available: {success_vars}/{total_vars} variables have valid cached values, {failed_vars} failed{source_text}"
            response_success = True  # Still success if we got some values

            # Log warnings for partial failures
            if streamer.logger:
                streamer.logger.warning(
                    f"Partial failure in cached values for dataset '{dataset_id}': {message}"
                )
                for var_name, var_error in error_details.items():
                    if formatted_values.get(var_name) == "ERROR":
                        streamer.logger.warning(f" Variable '{var_name}': {var_error}")

        return jsonify(
            {
                "success": response_success,
                "message": message,
                "values": formatted_values,
                "detailed_errors": error_details,
                "stats": stats,
                "timestamp": read_result.get(
                    "timestamp", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                ),
                "warning": read_result.get("warning"),
                "source": data_source,
                "is_cached": True,
                "cache_info": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
            }
        )

    except Exception as e:
        return (
            jsonify(
                {
                    "success": False,
                    "message": f"Error retrieving cached variable values: {str(e)}",
                    "values": {},
                }
            ),
            500,
        )
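

# Illustrative sketch (hypothetical host, never called by the backend): polling the
# cache-only endpoint above and backing off on 429 (rate limited) / 503 (API timeout).
def _example_poll_dataset_values(dataset_id, retries=3):
    url = f"http://127.0.0.1:5050/api/datasets/{dataset_id}/variables/values"
    for _ in range(retries):
        resp = requests.get(url, timeout=5)
        if resp.status_code in (429, 503):
            time.sleep(1.0)  # the cache refreshes at the dataset's sampling interval
            continue
        return resp.json()
    return None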


# Dataset Management API Endpoints


@app.route("/api/datasets", methods=["GET"])
def get_datasets():
    """Get all datasets"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    return jsonify(
        {
            "success": True,
            "datasets": streamer.datasets,
            "active_datasets": list(streamer.active_datasets),
            "current_dataset_id": streamer.current_dataset_id,
        }
    )


@app.route("/api/datasets", methods=["POST"])
def create_dataset():
    """Create a new dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        dataset_id = data.get("dataset_id", "").strip()
        name = data.get("name", "").strip()
        prefix = data.get("prefix", "").strip()
        sampling_interval = data.get("sampling_interval")

        if not dataset_id or not name or not prefix:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Dataset ID, name, and prefix are required",
                    }
                ),
                400,
            )

        # Validate sampling interval if provided
        if sampling_interval is not None:
            sampling_interval = float(sampling_interval)
            if sampling_interval < 0.01:
                return (
                    jsonify(
                        {
                            "success": False,
                            "message": "Sampling interval must be at least 0.01 seconds",
                        }
                    ),
                    400,
                )

        streamer.create_dataset(dataset_id, name, prefix, sampling_interval)

        return jsonify(
            {
                "success": True,
                "message": f"Dataset '{name}' created successfully",
                "dataset_id": dataset_id,
            }
        )

    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 400
    except Exception as e:
        return (
            jsonify({"success": False, "message": f"Error creating dataset: {str(e)}"}),
            500,
        )


@app.route("/api/datasets/<dataset_id>", methods=["DELETE"])
def delete_dataset(dataset_id):
    """Delete a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        streamer.delete_dataset(dataset_id)
        return jsonify({"success": True, "message": "Dataset deleted successfully"})
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except Exception as e:
        return (
            jsonify({"success": False, "message": f"Error deleting dataset: {str(e)}"}),
            500,
        )


@app.route("/api/datasets/<dataset_id>/activate", methods=["POST"])
def activate_dataset(dataset_id):
    """Activate a dataset for streaming"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        streamer.activate_dataset(dataset_id)
        return jsonify({"success": True, "message": "Dataset activated successfully"})
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except RuntimeError as e:
        return jsonify({"success": False, "message": str(e)}), 400
    except Exception as e:
        return (
            jsonify(
                {"success": False, "message": f"Error activating dataset: {str(e)}"}
            ),
            500,
        )


@app.route("/api/datasets/<dataset_id>/deactivate", methods=["POST"])
def deactivate_dataset(dataset_id):
    """Deactivate a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        streamer.deactivate_dataset(dataset_id)
        return jsonify({"success": True, "message": "Dataset deactivated successfully"})
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except Exception as e:
        return (
            jsonify(
                {"success": False, "message": f"Error deactivating dataset: {str(e)}"}
            ),
            500,
        )


@app.route("/api/datasets/<dataset_id>/variables", methods=["POST"])
def add_variable_to_dataset(dataset_id):
    """Add a variable to a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        name = data.get("name", "").strip()
        area = data.get("area", "").strip()
        db = data.get("db", 1)
        offset = data.get("offset", 0)
        var_type = data.get("type", "").strip()
        bit = data.get("bit")
        streaming = data.get("streaming", False)

        if not name or not area or not var_type:
            return (
                jsonify(
                    {
                        "success": False,
                        "message": "Variable name, area, and type are required",
                    }
                ),
                400,
            )

        streamer.add_variable_to_dataset(
            dataset_id, name, area, db, offset, var_type, bit, streaming
        )

        return jsonify(
            {
                "success": True,
                "message": f"Variable '{name}' added to dataset successfully",
            }
        )

    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 400
    except Exception as e:
        return (
            jsonify({"success": False, "message": f"Error adding variable: {str(e)}"}),
            500,
        )


@app.route("/api/datasets/<dataset_id>/variables/<variable_name>", methods=["DELETE"])
def remove_variable_from_dataset(dataset_id, variable_name):
    """Remove a variable from a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        streamer.remove_variable_from_dataset(dataset_id, variable_name)
        return jsonify(
            {
                "success": True,
                "message": f"Variable '{variable_name}' removed from dataset successfully",
            }
        )
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except Exception as e:
        return (
            jsonify(
                {"success": False, "message": f"Error removing variable: {str(e)}"}
            ),
            500,
        )


@app.route(
    "/api/datasets/<dataset_id>/variables/<variable_name>/streaming", methods=["POST"]
)
def toggle_variable_streaming_in_dataset(dataset_id, variable_name):
    """Toggle streaming for a variable in a dataset"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        enabled = data.get("enabled", False)

        streamer.toggle_variable_streaming(dataset_id, variable_name, enabled)

        return jsonify(
            {
                "success": True,
                "message": f"Variable '{variable_name}' streaming {'enabled' if enabled else 'disabled'}",
            }
        )
    except ValueError as e:
        return jsonify({"success": False, "message": str(e)}), 404
    except Exception as e:
        return (
            jsonify(
                {"success": False, "message": f"Error toggling streaming: {str(e)}"}
            ),
            500,
        )


@app.route("/api/datasets/current", methods=["POST"])
def set_current_dataset():
    """Set the current dataset for editing"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()
        dataset_id = data.get("dataset_id")

        if dataset_id and dataset_id in streamer.datasets:
            streamer.current_dataset_id = dataset_id
            # Note: No need to save - this is just changing current selection in memory
            return jsonify(
                {
                    "success": True,
                    "message": "Current dataset updated successfully",
                    "current_dataset_id": dataset_id,
                }
            )
        else:
            return jsonify({"success": False, "message": "Invalid dataset ID"}), 400

    except Exception as e:
        return (
            jsonify(
                {
                    "success": False,
                    "message": f"Error setting current dataset: {str(e)}",
                }
            ),
            500,
        )


@app.route("/api/streaming/start", methods=["POST"])
def start_streaming():
    """Start UDP streaming (legacy endpoint - now only starts UDP streaming)"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.start_streaming():
        return jsonify({"success": True, "message": "UDP streaming started"})
    else:
        return (
            jsonify({"success": False, "message": "Error starting UDP streaming"}),
            500,
        )


@app.route("/api/streaming/stop", methods=["POST"])
def stop_streaming():
    """Stop UDP streaming (legacy endpoint - now only stops UDP streaming)"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.stop_streaming()
    return jsonify({"success": True, "message": "UDP streaming stopped"})


@app.route("/api/sampling", methods=["POST"])
def update_sampling():
    """Update sampling interval"""
    try:
        data = request.get_json()
        interval = float(data.get("interval", 0.1))

        if interval < 0.01:
            interval = 0.01

        streamer.update_sampling_interval(interval)
        return jsonify({"success": True, "message": f"Interval updated to {interval}s"})

    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 400


@app.route("/api/csv/start", methods=["POST"])
def start_csv_recording_legacy():
    """Start CSV recording independently (legacy endpoint)"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.data_streamer.start_csv_recording():
        return jsonify({"success": True, "message": "CSV recording started"})
    else:
        return (
            jsonify({"success": False, "message": "Error starting CSV recording"}),
            500,
        )


@app.route("/api/csv/stop", methods=["POST"])
def stop_csv_recording_legacy():
    """Stop CSV recording independently (legacy endpoint)"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.data_streamer.stop_csv_recording()
    return jsonify({"success": True, "message": "CSV recording stopped"})


@app.route("/api/csv/recording/start", methods=["POST"])
def start_csv_recording():
    """Start CSV recording independently of UDP streaming"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.data_streamer.start_csv_recording():
        return jsonify({"success": True, "message": "CSV recording started"})
    else:
        return (
            jsonify({"success": False, "message": "Error starting CSV recording"}),
            500,
        )


@app.route("/api/csv/recording/stop", methods=["POST"])
def stop_csv_recording():
    """Stop CSV recording independently of UDP streaming"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.data_streamer.stop_csv_recording()
    return jsonify({"success": True, "message": "CSV recording stopped"})


# 🔑 NEW: UDP Streaming Control (Independent)
@app.route("/api/udp/streaming/start", methods=["POST"])
def start_udp_streaming():
    """Start UDP streaming to PlotJuggler independently of CSV recording"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    if streamer.data_streamer.start_udp_streaming():
        return jsonify(
            {"success": True, "message": "UDP streaming to PlotJuggler started"}
        )
    else:
        return (
            jsonify({"success": False, "message": "Error starting UDP streaming"}),
            500,
        )


@app.route("/api/udp/streaming/stop", methods=["POST"])
def stop_udp_streaming():
    """Stop UDP streaming to PlotJuggler independently of CSV recording"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    streamer.data_streamer.stop_udp_streaming()
    return jsonify({"success": True, "message": "UDP streaming to PlotJuggler stopped"})
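

# Illustrative sketch (hypothetical host and dataset id, never called by the backend):
# the intended control flow, with CSV recording and UDP streaming driven independently.
def _example_independent_control_flow(base="http://127.0.0.1:5050"):
    requests.post(f"{base}/api/plc/connect", timeout=5)
    requests.post(f"{base}/api/datasets/fast/activate", timeout=5)  # "fast" is a made-up dataset id
    requests.post(f"{base}/api/csv/recording/start", timeout=5)
    requests.post(f"{base}/api/udp/streaming/start", timeout=5)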


# 🔍 HEALTH CHECK ENDPOINT
@app.route("/api/health", methods=["GET"])
def health_check():
    """Simple health check endpoint"""
    try:
        streamer_status = "initialized" if streamer is not None else "not_initialized"
        plot_manager_status = (
            "available"
            if (
                streamer
                and hasattr(streamer, "data_streamer")
                and hasattr(streamer.data_streamer, "plot_manager")
            )
            else "not_available"
        )

        return jsonify(
            {
                "status": "ok",
                "streamer": streamer_status,
                "plot_manager": plot_manager_status,
                "timestamp": time.time(),
            }
        )
    except Exception as e:
        return (
            jsonify({"status": "error", "error": str(e), "timestamp": time.time()}),
            500,
        )


# 📈 PLOT MANAGER API ENDPOINTS
@app.route("/api/plots", methods=["GET"])
def get_plots():
    """Get all plot sessions status"""
    # print("🔍 DEBUG: /api/plots endpoint called")

    error_response = check_streamer_initialized()
    if error_response:
        print("❌ DEBUG: Streamer not initialized")
        return error_response

    print("✅ DEBUG: Streamer is initialized")

    try:
        # print("🔍 DEBUG: Accessing streamer.data_streamer.plot_manager...")
        plot_manager = streamer.data_streamer.plot_manager
        print(f"✅ DEBUG: Plot manager obtained: {type(plot_manager)}")

        # print("🔍 DEBUG: Calling get_all_sessions_status()...")
        sessions = plot_manager.get_all_sessions_status()
        print(f"✅ DEBUG: Sessions obtained: {len(sessions)} sessions")
        print(f"📊 DEBUG: Sessions data: {sessions}")

        result = {"sessions": sessions}
        print(f"✅ DEBUG: Returning result: {result}")
        return jsonify(result)
    except Exception as e:
        print(f"💥 DEBUG: Exception in get_plots: {type(e).__name__}: {str(e)}")
        import traceback

        print("📋 DEBUG: Full traceback:")
        traceback.print_exc()
        return jsonify({"error": str(e)}), 500


@app.route("/api/plots", methods=["POST"])
def create_plot():
    """Create a new plot session"""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        data = request.get_json()

        # Validate required fields
        if not data.get("variables"):
            return jsonify({"error": "At least one variable is required"}), 400

        if not data.get("time_window"):
            return jsonify({"error": "Time window is required"}), 400

        # Validate that the variables exist in active datasets
        available_vars = streamer.data_streamer.plot_manager.get_available_variables(
            streamer.data_streamer.get_active_datasets(),
            streamer.config_manager.datasets,
        )

        invalid_vars = [var for var in data["variables"] if var not in available_vars]
        if invalid_vars:
            return (
                jsonify(
                    {
                        "error": f"Variables not available: {', '.join(invalid_vars)}",
                        "available_variables": available_vars,
                    }
                ),
                400,
            )

        # Validate the trigger if it is enabled
        if data.get("trigger_enabled") and data.get("trigger_variable"):
            boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
                streamer.data_streamer.get_active_datasets(),
                streamer.config_manager.datasets,
            )

            if data["trigger_variable"] not in boolean_vars:
                return (
                    jsonify(
                        {
                            "error": f"Trigger variable '{data['trigger_variable']}' is not a boolean variable",
                            "boolean_variables": boolean_vars,
                        }
                    ),
                    400,
                )

        # Build the session configuration
        config = {
            "id": data.get("id"),  # Use the plot ID from the definition
            "name": data.get(
                "name", f"Plot {len(streamer.data_streamer.plot_manager.sessions) + 1}"
            ),
            "variables": data["variables"],
            "time_window": int(data["time_window"]),
            "y_min": data.get("y_min"),
            "y_max": data.get("y_max"),
            "trigger_variable": data.get("trigger_variable"),
            "trigger_enabled": data.get("trigger_enabled", False),
            "trigger_on_true": data.get("trigger_on_true", True),
        }

        # Convert numeric values if they are present
        if config["y_min"] is not None:
            config["y_min"] = float(config["y_min"])
        if config["y_max"] is not None:
            config["y_max"] = float(config["y_max"])

        # Check if multiple sessions should be allowed (default: True for backward compatibility)
        allow_multiple = data.get("allow_multiple", True)

        session_id = streamer.data_streamer.plot_manager.create_session(
            config, allow_multiple
        )

        return jsonify(
            {
                "success": True,
                "session_id": session_id,
                "message": f"Plot session '{config['name']}' created successfully",
            }
        )

    except Exception as e:
        return jsonify({"error": str(e)}), 500
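

# Illustrative example payload for POST /api/plots (field names from the handler above;
# the values shown are hypothetical):
#   {"id": "plot_pressure", "name": "Pressure", "variables": ["P1", "P2"],
#    "time_window": 60, "y_min": 0, "y_max": 10,
#    "trigger_enabled": true, "trigger_variable": "CycleStart", "trigger_on_true": true}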
|
||
|
||
|
||
@app.route("/api/plots/<session_id>", methods=["DELETE"])
|
||
def remove_plot(session_id):
|
||
"""Remove a plot session"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
success = streamer.data_streamer.plot_manager.remove_session(session_id)
|
||
if success:
|
||
return jsonify({"success": True, "message": "Plot session removed"})
|
||
else:
|
||
return jsonify({"error": "Plot session not found"}), 404
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/<session_id>/control", methods=["POST"])
|
||
def control_plot(session_id):
|
||
"""Control a plot session (start/stop/pause/resume/clear)"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
data = request.get_json()
|
||
action = data.get("action")
|
||
|
||
if action not in ["start", "stop", "pause", "resume", "clear"]:
|
||
return (
|
||
jsonify(
|
||
{"error": "Invalid action. Use: start, stop, pause, resume, clear"}
|
||
),
|
||
400,
|
||
)
|
||
|
||
success = streamer.data_streamer.plot_manager.control_session(
|
||
session_id, action
|
||
)
|
||
if success:
|
||
return jsonify({"success": True, "message": f"Plot session {action}ed"})
|
||
else:
|
||
return jsonify({"error": "Plot session not found"}), 404
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/<session_id>/data", methods=["GET"])
|
||
def get_plot_data(session_id):
|
||
"""Get plot data for a specific session"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
plot_data = streamer.data_streamer.plot_manager.get_session_data(session_id)
|
||
if plot_data:
|
||
return jsonify(plot_data)
|
||
else:
|
||
return jsonify({"error": "Plot session not found"}), 404
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/<session_id>/config", methods=["GET"])
|
||
def get_plot_config(session_id):
|
||
"""Get plot configuration for a specific session"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
config = streamer.data_streamer.plot_manager.get_session_config(session_id)
|
||
if config:
|
||
return jsonify({"success": True, "config": config})
|
||
else:
|
||
return jsonify({"error": "Plot session not found"}), 404
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/<session_id>/config", methods=["PUT"])
|
||
def update_plot_config(session_id):
|
||
"""Update plot configuration for a specific session"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
data = request.get_json()
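        # Illustrative PUT body (field names match the keys read below; values are examples):
        #   {"name": "Line 1 overview", "variables": ["VarA"], "time_window": 120,
        #    "y_min": 0, "y_max": 100, "trigger_enabled": false, "trigger_variable": "StartFlag"}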
|
||
|
||
        # Validate required fields
        if not data.get("variables"):
            return jsonify({"error": "At least one variable is required"}), 400

        if not data.get("time_window"):
            return jsonify({"error": "Time window is required"}), 400

        # Validate that the variables exist in active datasets
        available_vars = streamer.data_streamer.plot_manager.get_available_variables(
            streamer.data_streamer.get_active_datasets(),
            streamer.config_manager.datasets,
        )

        invalid_vars = [var for var in data["variables"] if var not in available_vars]
        if invalid_vars:
            return (
                jsonify(
                    {
                        "error": f"Variables not available: {', '.join(invalid_vars)}",
                        "available_variables": available_vars,
                    }
                ),
                400,
            )

        # Validate the trigger if it is enabled
        if data.get("trigger_enabled") and data.get("trigger_variable"):
            boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
                streamer.data_streamer.get_active_datasets(),
                streamer.config_manager.datasets,
            )

            if data["trigger_variable"] not in boolean_vars:
                return (
                    jsonify(
                        {
                            "error": f"Trigger variable '{data['trigger_variable']}' is not a boolean variable",
                            "boolean_variables": boolean_vars,
                        }
                    ),
                    400,
                )

        # Create the updated configuration
        config = {
            "name": data.get("name", f"Plot {session_id}"),
            "variables": data["variables"],
            "time_window": int(data["time_window"]),
            "y_min": data.get("y_min"),
            "y_max": data.get("y_max"),
            "trigger_variable": data.get("trigger_variable"),
            "trigger_enabled": data.get("trigger_enabled", False),
            "trigger_on_true": data.get("trigger_on_true", True),
        }

        # Convert numeric values if present
        if config["y_min"] is not None:
            config["y_min"] = float(config["y_min"])
        if config["y_max"] is not None:
            config["y_max"] = float(config["y_max"])

        # Update the configuration
        success = streamer.data_streamer.plot_manager.update_session_config(
            session_id, config
        )

if success:
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"message": f"Plot session '{config['name']}' updated successfully",
|
||
}
|
||
)
|
||
else:
|
||
return jsonify({"error": "Plot session not found"}), 404
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/variables", methods=["GET"])
|
||
def get_plot_variables():
|
||
"""Get available variables for plotting"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
available_vars = streamer.data_streamer.plot_manager.get_available_variables(
|
||
streamer.data_streamer.get_active_datasets(),
|
||
streamer.config_manager.datasets,
|
||
)
|
||
|
||
boolean_vars = streamer.data_streamer.plot_manager.get_boolean_variables(
|
||
streamer.data_streamer.get_active_datasets(),
|
||
streamer.config_manager.datasets,
|
||
)
|
||
|
||
return jsonify(
|
||
{
|
||
"available_variables": available_vars,
|
||
"boolean_variables": boolean_vars,
|
||
"active_datasets_count": len(
|
||
streamer.data_streamer.get_active_datasets()
|
||
),
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/historical", methods=["POST"])
|
||
def get_historical_data():
|
||
"""Get historical data from CSV files for plot initialization with caching"""
|
||
global historical_cache
|
||
|
||
print("🔍 DEBUG: Historical endpoint called")
|
||
try:
|
||
data = request.get_json()
|
||
print(f"🔍 DEBUG: Request data: {data}")
|
||
|
||
if not data:
|
||
print("❌ DEBUG: No data provided")
|
||
return jsonify({"error": "No data provided"}), 400
|
||
|
||
variables = data.get("variables", [])
|
||
time_window_seconds = data.get("time_window", 60)
|
||
|
||
# Support for explicit start_time and end_time parameters
|
||
start_time_param = data.get("start_time")
|
||
end_time_param = data.get("end_time")
|
||
|
||
print(f"🔍 DEBUG: Variables: {variables}")
|
||
print(f"🔍 DEBUG: Time window: {time_window_seconds}")
|
||
print(f"🔍 DEBUG: Start time param: {start_time_param}")
|
||
print(f"🔍 DEBUG: End time param: {end_time_param}")
|
||
|
||
if not variables:
|
||
print("❌ DEBUG: No variables specified")
|
||
return jsonify({"error": "No variables specified"}), 400
|
||
|
||
# Filter out None/null values and empty strings
|
||
valid_variables = [
|
||
var for var in variables if var is not None and str(var).strip()
|
||
]
|
||
if not valid_variables:
|
||
print("❌ DEBUG: No valid variables after filtering")
|
||
return jsonify({"error": "No valid variables provided"}), 400
|
||
|
||
variables = valid_variables
|
||
print(f"🔍 DEBUG: Valid variables after filtering: {variables}")
|
||
|
||
# Import required modules
|
||
try:
|
||
# print("🔍 DEBUG: Importing modules...")
|
||
import pandas as pd
|
||
import glob
|
||
|
||
# print("🔍 DEBUG: All imports successful")
|
||
except ImportError as e:
|
||
print(f"❌ DEBUG: Import failed: {e}")
|
||
return jsonify({"error": f"pandas import failed: {str(e)}"}), 500
|
||
|
||
# Initialize cache if not exists
|
||
if historical_cache is None:
|
||
historical_cache = HistoricalDataCache(
|
||
logger=streamer.logger if streamer and streamer.logger else None,
|
||
max_cache_entries=50,
|
||
buffer_percentage=0.25,
|
||
)
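            # Note: buffer_percentage is intended to match the 25% buffer applied
            # below when discovering CSV files and when storing ranges in the cache.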
|
||
|
||
# Calculate time range
|
||
try:
|
||
# print("🔍 DEBUG: Calculating time range...")
|
||
|
||
if start_time_param and end_time_param:
|
||
# Parse timestamps from frontend (UTC) and convert to local time
|
||
|
||
# Parse as UTC timestamps (frontend sends them with 'Z')
|
||
start_time_utc = datetime.fromisoformat(
|
||
start_time_param.replace("Z", "+00:00")
|
||
)
|
||
end_time_utc = datetime.fromisoformat(
|
||
end_time_param.replace("Z", "+00:00")
|
||
)
|
||
|
||
# Convert to local time (remove timezone info since CSV data has no timezone)
|
||
start_time = start_time_utc.astimezone().replace(tzinfo=None)
|
||
end_time = end_time_utc.astimezone().replace(tzinfo=None)
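                # Example of the conversion above (illustrative, assuming a UTC+2 local zone):
                #   "2024-01-01T10:00:00Z" -> naive local datetime 2024-01-01 12:00:00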
|
||
|
||
print(f"🔍 DEBUG: UTC timestamps: {start_time_utc} to {end_time_utc}")
|
||
print(f"🔍 DEBUG: Local timestamps: {start_time} to {end_time}")
|
||
|
||
# Validate time range
|
||
if start_time >= end_time:
|
||
print(
|
||
f"❌ DEBUG: Invalid time range - start time must be before end time"
|
||
)
|
||
return (
|
||
jsonify(
|
||
{
|
||
"error": "Invalid time range: start time must be before end time"
|
||
}
|
||
),
|
||
400,
|
||
)
|
||
|
||
else:
|
||
# Legacy behavior: use time_window from now
|
||
end_time = datetime.now()
|
||
start_time = end_time - timedelta(seconds=time_window_seconds)
|
||
print(
|
||
f"🔍 DEBUG: Using time window calculation: {start_time} to {end_time}"
|
||
)
|
||
|
||
except Exception as e:
|
||
print(f"❌ DEBUG: Time calculation error: {e}")
|
||
return jsonify({"error": f"Time calculation failed: {str(e)}"}), 500
|
||
|
||
# Get relevant CSV files for cache checking
|
||
records_dir = get_records_directory()
|
||
csv_files = []
|
||
|
||
if os.path.exists(records_dir):
|
||
# Calculate buffered time range for file discovery (same as cache buffer)
|
||
duration = end_time - start_time
|
||
buffer_duration = duration * 0.25
|
||
buffer_start = start_time - buffer_duration
|
||
buffer_end = end_time + buffer_duration
|
||
|
||
current_date = buffer_start.date()
|
||
end_date = buffer_end.date()
|
||
|
||
# Collect all relevant CSV files
|
||
while current_date <= end_date:
|
||
date_str = current_date.strftime("%d-%m-%Y")
|
||
folder_path = os.path.join(records_dir, date_str)
|
||
if os.path.exists(folder_path):
|
||
csv_files.extend(glob.glob(os.path.join(folder_path, "*.csv")))
|
||
current_date += timedelta(days=1)
|
||
|
||
# print(f"🔍 DEBUG: Found {len(csv_files)} CSV files for cache checking")
|
||
|
||
# Try to get data from cache first
|
||
# TEMPORARY: Disable cache to debug data display issues
|
||
cached_data = None # historical_cache.get_cached_data(variables, start_time, end_time, csv_files)
|
||
|
||
if cached_data is not None:
|
||
print("<EFBFBD> DEBUG: Cache hit! Returning cached data")
|
||
|
||
# Convert DataFrame to the expected format
|
||
historical_data = []
|
||
for _, row in cached_data.iterrows():
|
||
if "timestamp" in row and "variable" in row and "value" in row:
|
||
historical_data.append(
|
||
{
|
||
"timestamp": (
|
||
row["timestamp"].isoformat()
|
||
if pd.api.types.is_datetime64_any_dtype(
|
||
row["timestamp"]
|
||
)
|
||
else str(row["timestamp"])
|
||
),
|
||
"variable": str(row["variable"]),
|
||
"value": (
|
||
float(row["value"])
|
||
if pd.api.types.is_numeric_dtype(type(row["value"]))
|
||
else row["value"]
|
||
),
|
||
}
|
||
)
|
||
|
||
return jsonify(
|
||
{
|
||
"data": historical_data,
|
||
"time_range": {
|
||
"start": start_time.isoformat(),
|
||
"end": end_time.isoformat(),
|
||
},
|
||
"variables_found": list(
|
||
set([item["variable"] for item in historical_data])
|
||
),
|
||
"total_points": len(historical_data),
|
||
"cached": True, # Indicate this was from cache
|
||
}
|
||
)
|
||
|
||
# Cache miss - load data from CSV files
|
||
print("🔍 DEBUG: Cache miss - loading from CSV files")
|
||
historical_data = []
|
||
|
||
if not os.path.exists(records_dir):
|
||
print("🔍 DEBUG: Records directory not found, returning empty data")
|
||
return jsonify(
|
||
{
|
||
"data": [],
|
||
"time_range": {
|
||
"start": start_time.isoformat(),
|
||
"end": end_time.isoformat(),
|
||
},
|
||
"variables_found": [],
|
||
"total_points": 0,
|
||
}
|
||
)
|
||
|
||
# Calculate extended range for cache storage (25% buffer)
|
||
duration = end_time - start_time
|
||
buffer_duration = duration * 0.25
|
||
buffered_start = start_time - buffer_duration
|
||
buffered_end = end_time + buffer_duration
|
||
|
||
# Search for CSV files in the buffered range
|
||
current_date = buffered_start.date()
|
||
end_date = buffered_end.date()
|
||
date_folders = []
|
||
|
||
while current_date <= end_date:
|
||
date_str = current_date.strftime("%d-%m-%Y")
|
||
folder_path = os.path.join(records_dir, date_str)
|
||
if os.path.exists(folder_path):
|
||
date_folders.append(folder_path)
|
||
current_date += timedelta(days=1)
|
||
|
||
# print(f"🔍 DEBUG: Processing {len(date_folders)} date folders with buffer")
|
||
|
||
# Process CSV files and collect all data (including buffer)
|
||
# Use a dictionary to collect data by timestamp, then fill missing values with NaN
|
||
timestamp_data = {} # {timestamp: {variable: value}}
|
||
all_timestamps = set()
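        # Illustrative shape of timestamp_data (variable names are examples):
        #   {Timestamp("2024-01-01 10:00:00"): {"VarA": 21.5, "RunFlag": True}}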
|
||
|
||
for folder_path in date_folders:
|
||
csv_files_in_folder = glob.glob(os.path.join(folder_path, "*.csv"))
|
||
|
||
for csv_file in csv_files_in_folder:
|
||
try:
|
||
# Read CSV file with encoding detection
|
||
df = None
|
||
for encoding in ["utf-8", "utf-8-sig", "utf-16", "latin-1"]:
|
||
try:
|
||
df = pd.read_csv(
|
||
csv_file, encoding=encoding, on_bad_lines="skip"
|
||
)
|
||
break
|
||
                        except Exception:
continue
|
||
|
||
if df is None:
|
||
continue
|
||
|
||
# Clean column names
|
||
df.columns = [
|
||
col.replace("\ufeff", "").replace("\x00", "").strip()
|
||
for col in df.columns
|
||
]
|
||
|
||
# Find timestamp column
|
||
timestamp_col = None
|
||
for col in df.columns:
|
||
if "timestamp" in col.lower():
|
||
timestamp_col = col
|
||
break
|
||
|
||
if timestamp_col is None:
|
||
continue
|
||
|
||
# Convert timestamps
|
||
df[timestamp_col] = pd.to_datetime(
|
||
df[timestamp_col], errors="coerce"
|
||
)
|
||
df = df.dropna(subset=[timestamp_col])
|
||
|
||
if df.empty:
|
||
continue
|
||
|
||
# Rename timestamp column for consistency
|
||
if timestamp_col != "timestamp":
|
||
df = df.rename(columns={timestamp_col: "timestamp"})
|
||
|
||
# Filter to buffered time range
|
||
mask = (df["timestamp"] >= buffered_start) & (
|
||
df["timestamp"] <= buffered_end
|
||
)
|
||
filtered_df = df[mask]
|
||
|
||
if filtered_df.empty:
|
||
continue
|
||
|
||
# Check for matching variables
|
||
matching_vars = [
|
||
var for var in variables if var in filtered_df.columns
|
||
]
|
||
|
||
if not matching_vars:
|
||
continue
|
||
|
||
# print(f"🔍 DEBUG: File {csv_file} - Found {len(matching_vars)} matching variables: {matching_vars}")
|
||
|
||
# Extract data for each timestamp
|
||
for _, row in filtered_df.iterrows():
|
||
timestamp = row["timestamp"]
|
||
all_timestamps.add(timestamp)
|
||
|
||
if timestamp not in timestamp_data:
|
||
timestamp_data[timestamp] = {}
|
||
|
||
# Store data for available variables, others will be NaN
|
||
for var in matching_vars:
|
||
if var in row:
|
||
raw_value = row[var]
|
||
if pd.notna(raw_value):
|
||
try:
|
||
# Type conversion
|
||
if isinstance(raw_value, str):
|
||
value_lower = raw_value.lower().strip()
|
||
if value_lower == "true":
|
||
value = True
|
||
elif value_lower == "false":
|
||
value = False
|
||
else:
|
||
try:
|
||
value = float(raw_value)
|
||
except ValueError:
|
||
value = None
|
||
elif isinstance(raw_value, (int, float)):
|
||
value = float(raw_value)
|
||
else:
|
||
value = None
|
||
|
||
timestamp_data[timestamp][var] = value
|
||
                                    except Exception:
timestamp_data[timestamp][var] = None
|
||
else:
|
||
timestamp_data[timestamp][var] = None
|
||
|
||
except Exception as e:
|
||
print(f"Warning: Could not read CSV file {csv_file}: {e}")
|
||
continue
|
||
|
||
# Convert to list format for DataFrame creation
|
||
all_data_for_cache = []
|
||
for timestamp in sorted(all_timestamps):
|
||
for var in variables:
|
||
# Get value or None if not available
|
||
value = timestamp_data[timestamp].get(var, None)
|
||
all_data_for_cache.append(
|
||
{
|
||
"timestamp": timestamp,
|
||
"variable": var,
|
||
"value": value,
|
||
}
|
||
)
|
||
|
||
print(
|
||
f"🔍 DEBUG: Collected {len(all_data_for_cache)} total data points from {len(all_timestamps)} timestamps for {len(variables)} variables"
|
||
)
|
||
|
||
# Convert to DataFrame for caching
|
||
if all_data_for_cache:
|
||
cache_df = pd.DataFrame(all_data_for_cache)
|
||
cache_df = cache_df.sort_values("timestamp")
|
||
|
||
# Store in cache
|
||
historical_cache.store_data(variables, start_time, end_time, cache_df)
|
||
|
||
# Filter to actual requested range for response
|
||
response_mask = (cache_df["timestamp"] >= start_time) & (
|
||
cache_df["timestamp"] <= end_time
|
||
)
|
||
response_df = cache_df[response_mask]
|
||
|
||
# Convert to response format
|
||
historical_data = []
|
||
for _, row in response_df.iterrows():
|
||
# Include all data points, even with None values (converted to null in JSON)
|
||
value = row["value"]
|
||
# Convert pandas NaN to None for proper JSON serialization
|
||
if pd.isna(value):
|
||
value = None
|
||
|
||
historical_data.append(
|
||
{
|
||
"timestamp": row["timestamp"].isoformat(),
|
||
"variable": row["variable"],
|
||
"value": value,
|
||
}
|
||
)
|
||
|
||
print(f"🔍 DEBUG: Loaded {len(historical_data)} data points for response")
|
||
print(f"🔍 DEBUG: Cached {len(all_data_for_cache)} total data points")
|
||
|
||
return jsonify(
|
||
{
|
||
"data": historical_data,
|
||
"time_range": {
|
||
"start": start_time.isoformat(),
|
||
"end": end_time.isoformat(),
|
||
},
|
||
"variables_found": list(
|
||
set([item["variable"] for item in historical_data])
|
||
),
|
||
"total_points": len(historical_data),
|
||
"cached": False, # Indicate this was loaded fresh
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
import traceback
|
||
|
||
traceback.print_exc()
|
||
return jsonify({"error": f"Internal server error: {str(e)}"}), 500
|
||
|
||
|
||
@app.route("/api/plots/historical/cache/stats", methods=["GET"])
|
||
def get_cache_stats():
|
||
"""Get cache statistics for monitoring"""
|
||
global historical_cache
|
||
|
||
try:
|
||
if historical_cache is None:
|
||
return jsonify(
|
||
{
|
||
"cache_initialized": False,
|
||
"stats": {
|
||
"entries": 0,
|
||
"total_size_mb": 0,
|
||
"max_entries": 0,
|
||
"buffer_percentage": 0,
|
||
},
|
||
}
|
||
)
|
||
|
||
stats = historical_cache.get_cache_stats()
|
||
return jsonify({"cache_initialized": True, "stats": stats})
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/historical/cache/clear", methods=["POST"])
|
||
def clear_cache():
|
||
"""Clear the historical data cache"""
|
||
global historical_cache
|
||
|
||
try:
|
||
if historical_cache is not None:
|
||
historical_cache.clear_cache()
|
||
return jsonify({"success": True, "message": "Cache cleared successfully"})
|
||
else:
|
||
return jsonify({"success": False, "message": "Cache not initialized"})
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/historical/date-range", methods=["GET"])
|
||
def get_historical_date_range():
|
||
"""Get the available date range from CSV files"""
|
||
try:
|
||
import pandas as pd
|
||
import glob
|
||
|
||
# Get records directory
|
||
records_dir = get_records_directory()
|
||
|
||
if not os.path.exists(records_dir):
|
||
return (
|
||
jsonify({"success": False, "error": "No records directory found"}),
|
||
404,
|
||
)
|
||
|
||
# Find all date folders (format: DD-MM-YYYY)
|
||
date_folders = []
|
||
for item in os.listdir(records_dir):
|
||
folder_path = os.path.join(records_dir, item)
|
||
if os.path.isdir(folder_path):
|
||
try:
|
||
# Try to parse the folder name as a date
|
||
date_obj = datetime.strptime(item, "%d-%m-%Y")
|
||
date_folders.append((date_obj, folder_path))
|
||
except ValueError:
|
||
continue
|
||
|
||
if not date_folders:
|
||
return (
|
||
jsonify({"success": False, "error": "No valid date folders found"}),
|
||
404,
|
||
)
|
||
|
||
# Sort by date
|
||
date_folders.sort(key=lambda x: x[0])
|
||
|
||
# Get the earliest and latest dates
|
||
earliest_date = date_folders[0][0]
|
||
latest_date = date_folders[-1][0]
|
||
|
||
# For more precise range, check actual CSV file timestamps
|
||
min_timestamp = None
|
||
max_timestamp = None
|
||
|
||
# Check files more thoroughly to get precise timestamp range
|
||
for date_obj, folder_path in date_folders:
|
||
csv_files = glob.glob(os.path.join(folder_path, "*.csv"))
|
||
|
||
for csv_file in csv_files:
|
||
try:
|
||
# Try to read timestamp range from CSV with better sampling
|
||
# Read first and last few rows to get min/max more accurately
|
||
df_head = pd.read_csv(csv_file, nrows=100, encoding="utf-8-sig")
|
||
df_tail = pd.read_csv(csv_file, encoding="utf-8-sig").tail(100)
|
||
|
||
# Combine head and tail for better range detection
|
||
df_sample = pd.concat([df_head, df_tail]).drop_duplicates()
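                    # Sampling only the head and tail assumes rows are written in
                    # roughly chronological order, so their min/max approximates the
                    # file's true timestamp range.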
|
||
|
||
# Find timestamp column
|
||
timestamp_col = None
|
||
for col in df_sample.columns:
|
||
if "timestamp" in col.lower():
|
||
timestamp_col = col
|
||
break
|
||
|
||
if timestamp_col:
|
||
# Convert timestamp with multiple format attempts
|
||
df_sample[timestamp_col] = pd.to_datetime(
|
||
df_sample[timestamp_col], errors="coerce"
|
||
)
|
||
df_sample = df_sample.dropna(subset=[timestamp_col])
|
||
|
||
if not df_sample.empty:
|
||
file_min = df_sample[timestamp_col].min()
|
||
file_max = df_sample[timestamp_col].max()
|
||
|
||
# Convert to timezone-naive datetime if needed
|
||
if (
|
||
hasattr(file_min, "tz_localize")
|
||
and file_min.tz is not None
|
||
):
|
||
file_min = file_min.tz_localize(None)
|
||
if (
|
||
hasattr(file_max, "tz_localize")
|
||
and file_max.tz is not None
|
||
):
|
||
file_max = file_max.tz_localize(None)
|
||
|
||
if min_timestamp is None or file_min < min_timestamp:
|
||
min_timestamp = file_min
|
||
if max_timestamp is None or file_max > max_timestamp:
|
||
max_timestamp = file_max
|
||
|
||
# print(f"🔍 DEBUG: File {csv_file} - Range: {file_min} to {file_max}")
|
||
|
||
except Exception as e:
|
||
# print(f"🔍 DEBUG: Error reading {csv_file}: {e}")
|
||
continue
|
||
|
||
# Use folder dates as fallback if we couldn't read CSV timestamps
|
||
if min_timestamp is None:
|
||
min_timestamp = earliest_date
|
||
print(f"🔍 DEBUG: Using earliest folder date as min: {earliest_date}")
|
||
if max_timestamp is None:
|
||
max_timestamp = latest_date + timedelta(days=1)
|
||
print(f"🔍 DEBUG: Using latest folder date as max: {latest_date}")
|
||
|
||
print(f"🔍 DEBUG: Final timestamp range: {min_timestamp} to {max_timestamp}")
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"date_range": {
|
||
"min_date": min_timestamp.isoformat(),
|
||
"max_date": max_timestamp.isoformat(),
|
||
"folders_count": len(date_folders),
|
||
"earliest_folder": earliest_date.strftime("%d-%m-%Y"),
|
||
"latest_folder": latest_date.strftime("%d-%m-%Y"),
|
||
},
|
||
}
|
||
)
|
||
|
||
except ImportError:
|
||
return (
|
||
jsonify(
|
||
{
|
||
"success": False,
|
||
"error": "pandas is required for date range calculation",
|
||
}
|
||
),
|
||
500,
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/historical/data-segments", methods=["GET"])
|
||
def get_historical_data_segments():
|
||
"""Get data availability segments for visualization"""
|
||
try:
|
||
import glob
|
||
|
||
records_dir = get_records_directory()
|
||
|
||
if not os.path.exists(records_dir):
|
||
return (
|
||
jsonify({"success": False, "error": "No records directory found"}),
|
||
404,
|
||
)
|
||
|
||
segments = []
|
||
|
||
# Find all date folders (format: DD-MM-YYYY)
|
||
date_folders = []
|
||
for item in os.listdir(records_dir):
|
||
folder_path = os.path.join(records_dir, item)
|
||
if os.path.isdir(folder_path):
|
||
try:
|
||
# Try to parse the folder name as a date
|
||
date_obj = datetime.strptime(item, "%d-%m-%Y")
|
||
date_folders.append((date_obj, folder_path))
|
||
except ValueError:
|
||
continue
|
||
|
||
# Sort by date
|
||
date_folders.sort(key=lambda x: x[0])
|
||
|
||
# For each date folder, find file segments
|
||
for date_obj, folder_path in date_folders:
|
||
csv_files = glob.glob(os.path.join(folder_path, "*.csv"))
|
||
|
||
# Group files by dataset name (prefix before _XX.csv)
|
||
dataset_files = {}
|
||
for csv_file in csv_files:
|
||
filename = os.path.basename(csv_file)
|
||
if "_" in filename:
|
||
# Extract dataset name (e.g., "fast" from "fast_00.csv")
|
||
dataset_name = filename.split("_")[0]
|
||
if dataset_name not in dataset_files:
|
||
dataset_files[dataset_name] = []
|
||
dataset_files[dataset_name].append(csv_file)
|
||
|
||
# For each dataset, create segments based on file sequence
|
||
for dataset_name, files in dataset_files.items():
|
||
# Sort files by sequence number
|
||
files.sort()
|
||
|
||
if not files:
|
||
continue
|
||
|
||
try:
|
||
# Get time range from first and last file quickly
|
||
# Read only first few lines of first file and last few lines of last file
|
||
first_file = files[0]
|
||
last_file = files[-1]
|
||
|
||
# Quick read of timestamps from file headers/footers
|
||
start_time = None
|
||
end_time = None
|
||
|
||
# Read first file's start time
|
||
with open(first_file, "r", encoding="utf-8-sig") as f:
|
||
lines = f.readlines()
|
||
if len(lines) > 1: # Skip header
|
||
first_data_line = lines[1].strip()
|
||
if first_data_line and "," in first_data_line:
|
||
timestamp_str = first_data_line.split(",")[0]
|
||
try:
|
||
start_time = datetime.fromisoformat(
|
||
timestamp_str.replace("Z", "+00:00")
|
||
)
|
||
if start_time.tzinfo:
|
||
start_time = start_time.replace(tzinfo=None)
|
||
                                except Exception:
continue
|
||
|
||
# Read last file's end time
|
||
with open(last_file, "r", encoding="utf-8-sig") as f:
|
||
lines = f.readlines()
|
||
# Find last non-empty line with data
|
||
for line in reversed(lines):
|
||
line = line.strip()
|
||
if (
|
||
line
|
||
and "," in line
|
||
and not line.startswith("timestamp")
|
||
):
|
||
timestamp_str = line.split(",")[0]
|
||
try:
|
||
end_time = datetime.fromisoformat(
|
||
timestamp_str.replace("Z", "+00:00")
|
||
)
|
||
if end_time.tzinfo:
|
||
end_time = end_time.replace(tzinfo=None)
|
||
break
|
||
                                except Exception:
continue
|
||
|
||
if start_time and end_time:
|
||
segments.append(
|
||
{
|
||
"dataset": dataset_name,
|
||
"start": start_time.isoformat(),
|
||
"end": end_time.isoformat(),
|
||
"files_count": len(files),
|
||
"date_folder": date_obj.strftime("%d-%m-%Y"),
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
# Skip problematic files, don't break the entire response
|
||
backend_logger.warning(
|
||
f"Error processing dataset {dataset_name} in {folder_path}: {e}"
|
||
)
|
||
continue
|
||
|
||
return jsonify(
|
||
{"success": True, "segments": segments, "total_segments": len(segments)}
|
||
)
|
||
|
||
except Exception as e:
|
||
backend_logger.error(f"Error getting data segments: {e}")
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/sessions/<plot_id>", methods=["GET"])
|
||
def get_plot_sessions(plot_id):
|
||
"""Get all session IDs for a specific plot ID"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
session_ids = streamer.data_streamer.plot_manager.get_sessions_by_plot_id(
|
||
plot_id
|
||
)
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"plot_id": plot_id,
|
||
"session_ids": session_ids,
|
||
"session_count": len(session_ids),
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plots/cleanup", methods=["POST"])
|
||
def cleanup_plot_sessions():
|
||
"""Manually cleanup inactive plot sessions"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
data = request.get_json() or {}
|
||
max_age_seconds = data.get("max_age_seconds", 3600) # Default 1 hour
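        # Illustrative request body (optional): {"max_age_seconds": 1800}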
|
||
|
||
removed_count = streamer.data_streamer.plot_manager.cleanup_inactive_sessions(
|
||
max_age_seconds
|
||
)
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"removed_sessions": removed_count,
|
||
"message": f"Cleaned up {removed_count} inactive plot sessions",
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/status")
|
||
def get_status():
|
||
"""Get current status"""
|
||
if streamer is None:
|
||
return jsonify({"error": "Application not initialized"}), 503
|
||
return jsonify(streamer.get_status())
|
||
|
||
|
||
@app.route("/api/priority/status")
|
||
def get_priority_status():
|
||
"""🔑 Get detailed priority and recording protection status"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
priority_stats = streamer.data_streamer.get_streaming_stats()
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"priority_protection": priority_stats,
|
||
"message": "Priority protection ensures CSV recording has maximum priority over API operations",
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/performance/current")
|
||
def get_current_performance():
|
||
"""📊 Get current performance metrics (last 10 seconds)"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
current_stats = streamer.data_streamer.performance_monitor.get_current_stats()
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"current_performance": current_stats,
|
||
"message": "Current performance metrics for active recording operations",
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/performance/historical")
|
||
def get_historical_performance():
|
||
"""📊 Get historical performance metrics"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
# Get query parameters
|
||
windows = int(request.args.get("windows", 6)) # Default: last minute (6 * 10s)
|
||
windows = min(max(windows, 1), 60) # Limit between 1 and 60 windows
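        # Each window covers 10 s, e.g. windows=6 -> the last 60 seconds.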
|
||
|
||
historical_stats = (
|
||
streamer.data_streamer.performance_monitor.get_historical_stats(windows)
|
||
)
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"historical_performance": historical_stats,
|
||
"windows_requested": windows,
|
||
"duration_minutes": windows * 10 / 60,
|
||
"message": f"Historical performance metrics for last {windows} windows ({windows*10} seconds)",
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/optimization/batch-reading")
|
||
def get_batch_reading_optimization_stats():
|
||
"""🚀 Get batch reading optimization statistics"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
stats = streamer.get_batch_reading_stats()
|
||
return jsonify({"success": True, "batch_reading_optimization": stats})
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plc/reconnection/status")
|
||
def get_plc_reconnection_status():
|
||
"""Get detailed PLC reconnection status"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
status = streamer.plc_client.get_reconnection_status()
|
||
connection_info = streamer.plc_client.get_connection_info()
|
||
|
||
return jsonify(
|
||
{"success": True, "reconnection": status, "connection": connection_info}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plc/reconnection/enable", methods=["POST"])
|
||
def enable_plc_reconnection():
|
||
"""Enable automatic PLC reconnection"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
streamer.plc_client.enable_automatic_reconnection(True)
|
||
return jsonify(
|
||
{"success": True, "message": "Automatic PLC reconnection enabled"}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plc/reconnection/disable", methods=["POST"])
|
||
def disable_plc_reconnection():
|
||
"""Disable automatic PLC reconnection"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
streamer.plc_client.enable_automatic_reconnection(False)
|
||
return jsonify(
|
||
{"success": True, "message": "Automatic PLC reconnection disabled"}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/events")
|
||
def get_events():
|
||
"""Get recent events from the application log"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
try:
|
||
limit = request.args.get("limit", 50, type=int)
|
||
limit = min(limit, 200) # Maximum 200 events per request
|
||
|
||
events = streamer.get_recent_events(limit)
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"events": events,
|
||
"total_events": len(streamer.event_logger.events_log),
|
||
"showing": len(events),
|
||
}
|
||
)
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/console-logs")
|
||
def get_console_logs():
|
||
"""Get recent console logs from the log files"""
|
||
try:
|
||
limit = request.args.get("limit", 1000, type=int)
|
||
limit = min(limit, 5000) # Maximum 5000 lines per request
|
||
|
||
log_dir = ".logs"
|
||
if not os.path.exists(log_dir):
|
||
return jsonify(
|
||
{"success": True, "logs": [], "message": "No log directory found"}
|
||
)
|
||
|
||
# Get all log files sorted by modification time (newest first)
|
||
log_files = []
|
||
for filename in os.listdir(log_dir):
|
||
if filename.startswith("backend_") and filename.endswith(".log"):
|
||
filepath = os.path.join(log_dir, filename)
|
||
log_files.append((filepath, os.path.getmtime(filepath)))
|
||
|
||
log_files.sort(key=lambda x: x[1], reverse=True)
|
||
|
||
logs = []
|
||
lines_read = 0
|
||
|
||
# Read logs from newest to oldest files
|
||
for filepath, _ in log_files:
|
||
if lines_read >= limit:
|
||
break
|
||
|
||
try:
|
||
with open(filepath, "r", encoding="utf-8") as f:
|
||
file_lines = f.readlines()
|
||
|
||
# Take the last lines from the current file if we haven't reached the limit
|
||
remaining_lines = limit - lines_read
|
||
if len(file_lines) > remaining_lines:
|
||
file_lines = file_lines[-remaining_lines:]
|
||
|
||
# Add lines in reverse order (newest first within each file)
|
||
for line in reversed(file_lines):
|
||
if lines_read >= limit:
|
||
break
|
||
line = line.strip()
|
||
if line: # Skip empty lines
|
||
logs.append(line)
|
||
lines_read += 1
|
||
|
||
except Exception as e:
|
||
backend_logger.error(f"Error reading log file {filepath}: {e}")
|
||
continue
|
||
|
||
# Don't reverse - logs are already in newest-first order
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"logs": logs,
|
||
"total_lines": lines_read,
|
||
"files_read": len([f for f, _ in log_files]),
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
backend_logger.error(f"Error getting console logs: {e}")
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/stream/variables", methods=["GET"])
|
||
def stream_variables():
|
||
"""Stream variable values in real-time using Server-Sent Events"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
# Get request parameters outside the generator
|
||
dataset_id = request.args.get("dataset_id")
|
||
interval = float(request.args.get("interval", 1.0)) # Default 1 second
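    # Illustrative usage: GET /api/stream/variables?dataset_id=fast&interval=1.0
    # ("fast" is an example dataset id; interval is the SSE push period in seconds)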
|
||
|
||
if not dataset_id:
|
||
return jsonify({"error": "Dataset ID required"}), 400
|
||
|
||
if dataset_id not in streamer.datasets:
|
||
return jsonify({"error": f"Dataset {dataset_id} not found"}), 404
|
||
|
||
def generate():
|
||
"""Generate SSE data stream - CACHE ONLY"""
|
||
|
||
# Send initial connection message
|
||
yield f"data: {json.dumps({'type': 'connected', 'message': 'SSE connection established - monitoring cache'})}\n\n"
|
||
|
||
last_values = {}
|
||
|
||
while True:
|
||
try:
|
||
# Check basic preconditions for cache availability
|
||
if not streamer.plc_client.is_connected():
|
||
# PLC not connected - cache won't be populated
|
||
data = {
|
||
"type": "plc_disconnected",
|
||
"message": "PLC not connected - cache not being populated",
|
||
"timestamp": datetime.now().isoformat(),
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
time.sleep(interval)
|
||
continue
|
||
|
||
# Check if dataset is active
|
||
if dataset_id not in streamer.active_datasets:
|
||
data = {
|
||
"type": "dataset_inactive",
|
||
"message": f"Dataset '{dataset_id}' is not active - activate to populate cache",
|
||
"timestamp": datetime.now().isoformat(),
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
time.sleep(interval)
|
||
continue
|
||
|
||
# Get dataset variables
|
||
dataset_variables = streamer.get_dataset_variables(dataset_id)
|
||
|
||
if not dataset_variables:
|
||
# No variables in dataset
|
||
data = {
|
||
"type": "no_variables",
|
||
"message": "No variables defined in this dataset",
|
||
"timestamp": datetime.now().isoformat(),
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
time.sleep(interval)
|
||
continue
|
||
|
||
# Get cached values - the ONLY source according to application principles
|
||
if not streamer.has_cached_values(dataset_id):
|
||
# No cache available yet - dataset might be starting up
|
||
sampling_interval = (
|
||
streamer.config_manager.get_dataset_sampling_interval(
|
||
dataset_id
|
||
)
|
||
)
|
||
data = {
|
||
"type": "no_cache",
|
||
"message": f"Waiting for cache to be populated (dataset reads every {sampling_interval}s)",
|
||
"timestamp": datetime.now().isoformat(),
|
||
"sampling_interval": sampling_interval,
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
time.sleep(interval)
|
||
continue
|
||
|
||
# Get cached values (the ONLY valid source)
|
||
read_result = streamer.get_cached_dataset_values(dataset_id)
|
||
|
||
if read_result.get("success", False):
|
||
values = read_result.get("values", {})
|
||
timestamp = read_result.get("timestamp", datetime.now().isoformat())
|
||
|
||
# Format values for display
|
||
formatted_values = {}
|
||
for var_name, value in values.items():
|
||
if value is not None:
|
||
var_config = dataset_variables[var_name]
|
||
var_type = var_config.get("type", "unknown")
|
||
|
||
try:
|
||
if var_type == "real":
|
||
formatted_values[var_name] = (
|
||
f"{value:.3f}"
|
||
if isinstance(value, (int, float))
|
||
else str(value)
|
||
)
|
||
elif var_type == "bool":
|
||
formatted_values[var_name] = (
|
||
"TRUE" if value else "FALSE"
|
||
)
|
||
elif var_type in [
|
||
"int",
|
||
"uint",
|
||
"dint",
|
||
"udint",
|
||
"word",
|
||
"byte",
|
||
"sint",
|
||
"usint",
|
||
]:
|
||
formatted_values[var_name] = (
|
||
str(int(value))
|
||
if isinstance(value, (int, float))
|
||
else str(value)
|
||
)
|
||
else:
|
||
formatted_values[var_name] = str(value)
|
||
                            except Exception:
formatted_values[var_name] = "FORMAT_ERROR"
|
||
else:
|
||
formatted_values[var_name] = "ERROR"
|
||
|
||
# Only send if values changed
|
||
if formatted_values != last_values:
|
||
data = {
|
||
"type": "values",
|
||
"values": formatted_values,
|
||
"timestamp": timestamp,
|
||
"source": "cache",
|
||
"stats": read_result.get("stats", {}),
|
||
"cache_age_info": f"Dataset reads every {streamer.config_manager.get_dataset_sampling_interval(dataset_id)}s",
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
last_values = formatted_values.copy()
|
||
else:
|
||
# Send error data from cache
|
||
error_data = {
|
||
"type": "cache_error",
|
||
"message": read_result.get(
|
||
"error", "Cached data indicates error"
|
||
),
|
||
"timestamp": datetime.now().isoformat(),
|
||
"error_type": read_result.get("error_type", "unknown"),
|
||
"source": "cache",
|
||
}
|
||
yield f"data: {json.dumps(error_data)}\n\n"
|
||
|
||
time.sleep(interval)
|
||
|
||
except Exception as e:
|
||
error_data = {
|
||
"type": "stream_error",
|
||
"message": f"SSE stream error: {str(e)}",
|
||
"timestamp": datetime.now().isoformat(),
|
||
"source": "cache_monitoring",
|
||
}
|
||
yield f"data: {json.dumps(error_data)}\n\n"
|
||
time.sleep(interval)
|
||
|
||
return Response(
|
||
generate(),
|
||
mimetype="text/event-stream",
|
||
headers={
|
||
"Cache-Control": "no-cache",
|
||
"Connection": "keep-alive",
|
||
"Access-Control-Allow-Origin": "*",
|
||
"Access-Control-Allow-Headers": "Cache-Control",
|
||
},
|
||
)
|
||
|
||
|
||
@app.route("/api/stream/status", methods=["GET"])
|
||
def stream_status():
|
||
"""Stream application status in real-time using Server-Sent Events"""
|
||
error_response = check_streamer_initialized()
|
||
if error_response:
|
||
return error_response
|
||
|
||
# Get request parameters outside the generator
|
||
interval = float(request.args.get("interval", 2.0)) # Default 2 seconds
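    # Illustrative usage: GET /api/stream/status?interval=2.0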
|
||
|
||
def generate():
|
||
"""Generate SSE status stream"""
|
||
last_status = None
|
||
|
||
# Send initial connection message
|
||
yield f"data: {json.dumps({'type': 'connected', 'message': 'Status stream connected'})}\n\n"
|
||
|
||
while True:
|
||
try:
|
||
# Get current status
|
||
current_status = streamer.get_status()
|
||
|
||
# Only send if status changed
|
||
if current_status != last_status:
|
||
data = {
|
||
"type": "status",
|
||
"status": current_status,
|
||
"timestamp": datetime.now().isoformat(),
|
||
}
|
||
yield f"data: {json.dumps(data)}\n\n"
|
||
last_status = current_status
|
||
|
||
time.sleep(interval)
|
||
|
||
except Exception as e:
|
||
error_data = {
|
||
"type": "error",
|
||
"message": f"Status stream error: {str(e)}",
|
||
"timestamp": datetime.now().isoformat(),
|
||
}
|
||
yield f"data: {json.dumps(error_data)}\n\n"
|
||
time.sleep(interval)
|
||
|
||
return Response(
|
||
generate(),
|
||
mimetype="text/event-stream",
|
||
headers={
|
||
"Cache-Control": "no-cache",
|
||
"Connection": "keep-alive",
|
||
"Access-Control-Allow-Origin": "*",
|
||
"Access-Control-Allow-Headers": "Cache-Control",
|
||
},
|
||
)
|
||
|
||
|
||
def graceful_shutdown():
|
||
"""Perform graceful shutdown with robust instance cleanup"""
|
||
print("\n⏹️ Performing graceful shutdown...")
|
||
try:
|
||
if streamer is not None:
|
||
print("🛑 Stopping streaming...")
|
||
streamer.stop_streaming()
|
||
|
||
print("📡 Disconnecting PLC...")
|
||
# 🚀 PRESERVE auto-reconnect state: Don't clear reconnection data
|
||
streamer.disconnect_plc(
|
||
manual_disconnect=False
|
||
) # Keep auto-reconnect state
|
||
|
||
print("🧹 Shutting down priority manager and performance monitor...")
|
||
# 🔑 PRIORITY: Shutdown priority manager and performance monitor
|
||
if hasattr(streamer.data_streamer, "priority_manager"):
|
||
streamer.data_streamer.priority_manager.shutdown()
|
||
if hasattr(streamer.data_streamer, "performance_monitor"):
|
||
streamer.data_streamer.performance_monitor.stop_monitoring()
|
||
|
||
print("🔓 Releasing instance lock...")
|
||
streamer.release_instance_lock()
|
||
print("✅ Instance lock released")
|
||
else:
|
||
print("⚠️ Streamer not initialized, skipping shutdown steps")
|
||
|
||
# 🔒 ROBUST CLEANUP: Use instance manager for reliable lock file cleanup
|
||
print("🧹 Cleaning up instance lock file...")
|
||
try:
|
||
instance_manager = InstanceManager(port=5050, lock_file="plc_streamer.lock")
|
||
if instance_manager.cleanup_instance():
|
||
print("✅ Instance lock file cleaned up successfully")
|
||
else:
|
||
print("⚠️ Warning: Instance lock file cleanup had issues")
|
||
except Exception as cleanup_error:
|
||
print(f"⚠️ Error during instance cleanup: {cleanup_error}")
|
||
# Fallback to direct file removal
|
||
try:
|
||
import os
|
||
|
||
lock_file = "plc_streamer.lock"
|
||
if os.path.exists(lock_file):
|
||
os.remove(lock_file)
|
||
print(f"🧹 Emergency cleanup: Removed lock file directly")
|
||
            except Exception:
pass # Silent fail for emergency cleanup
|
||
|
||
print("📝 Closing rotating logger system...")
|
||
# 📝 Close rotating logger system
|
||
backend_logger.close()
|
||
print("✅ Shutdown completed successfully")
|
||
|
||
except Exception as e:
|
||
print(f"⚠️ Error during shutdown: {e}")
|
||
# Try to force cleanup lock file as last resort
|
||
try:
|
||
import os
|
||
|
||
lock_file = "plc_streamer.lock"
|
||
if os.path.exists(lock_file):
|
||
os.remove(lock_file)
|
||
print(f"🧹 Emergency cleanup: Removed lock file")
|
||
        except Exception:
pass # Silent fail for emergency cleanup
|
||
|
||
|
||
def signal_handler(sig, frame):
|
||
"""Handle interrupt signals (Ctrl+C) with proper cleanup"""
|
||
print(f"\n🛑 Received signal {sig}...")
|
||
graceful_shutdown()
|
||
print("👋 Exiting application...")
|
||
sys.exit(0)
|
||
|
||
|
||
# Global variables for Flask and tray management
|
||
flask_thread = None
|
||
tray_icon = None
|
||
|
||
|
||
def open_app_browser(icon, item):
|
||
"""Open application in web browser"""
|
||
import webbrowser
|
||
|
||
webbrowser.open("http://localhost:5050")
|
||
|
||
|
||
def shutdown_from_tray(icon, item):
|
||
"""Shutdown Flask server from tray menu"""
|
||
print("🔄 Shutdown requested from system tray...")
|
||
graceful_shutdown()
|
||
if tray_icon:
|
||
tray_icon.stop()
|
||
|
||
|
||
def exit_application(icon, item):
|
||
"""Exit entire application from tray menu"""
|
||
print("🚪 Exit requested from system tray...")
|
||
graceful_shutdown()
|
||
if tray_icon:
|
||
tray_icon.stop()
|
||
sys.exit(0)
|
||
|
||
|
||
def run_flask_app():
|
||
"""Run Flask application in a separate thread"""
|
||
global streamer
|
||
try:
|
||
print("🚀 Starting Flask server for PLC S7-315 Streamer")
|
||
print("📊 Web interface available at: http://localhost:5050")
|
||
print("🔧 Configure your PLC and variables through the web interface")
|
||
|
||
# Initialize streamer (this will handle instance locking and auto-recovery)
|
||
streamer = PLCDataStreamer()
|
||
|
||
# Start Flask application
|
||
app.run(
|
||
debug=False,
|
||
host="0.0.0.0",
|
||
port=5050,
|
||
use_reloader=False,
|
||
threaded=True,
|
||
)
|
||
except Exception as e:
|
||
print(f"💥 Flask error: {e}")
|
||
|
||
|
||
def main():
|
||
"""Main application entry point with system tray support"""
|
||
global flask_thread, tray_icon, streamer
|
||
|
||
# Setup signal handlers for graceful shutdown
|
||
signal.signal(signal.SIGINT, signal_handler)
|
||
signal.signal(signal.SIGTERM, signal_handler)
|
||
|
||
max_retries = 3
|
||
retry_count = 0
|
||
|
||
# Check if tray is available and try to setup system tray
|
||
if TRAY_AVAILABLE:
|
||
try:
|
||
# Start Flask in a separate thread
|
||
flask_thread = threading.Thread(target=run_flask_app, daemon=True)
|
||
flask_thread.start()
|
||
|
||
# Give Flask time to start
|
||
time.sleep(2)
|
||
|
||
# Setup and run the system tray icon
|
||
# Use the icon from public folder (included in frontend/dist build)
|
||
icon_path = resource_path(os.path.join("frontend", "dist", "record.png"))
|
||
try:
|
||
image = Image.open(icon_path)
|
||
menu = pystray.Menu(
|
||
pystray.MenuItem(
|
||
"🌐 Abrir PLC Streamer", open_app_browser, default=True
|
||
),
|
||
pystray.MenuItem("🛑 Cerrar servidor", shutdown_from_tray),
|
||
pystray.MenuItem("🚪 Salir", exit_application),
|
||
)
|
||
tray_icon = pystray.Icon(
|
||
"PLC S7-315 Streamer", image, "PLC S7-315 Streamer & Logger", menu
|
||
)
|
||
print("🎯 Starting system tray icon...")
|
||
tray_icon.run() # This blocks the main thread until icon.stop() is called
|
||
except FileNotFoundError:
|
||
print(
|
||
f"⚠️ Error: Icon not found at '{icon_path}'. System tray will not start."
|
||
)
|
||
print(
|
||
"🔧 The Flask application will continue running in background. Press Ctrl+C to stop."
|
||
)
|
||
# Keep the main thread alive so the Flask thread doesn't exit immediately
|
||
try:
|
||
while flask_thread.is_alive():
|
||
flask_thread.join(timeout=1.0)
|
||
except KeyboardInterrupt:
|
||
print("\n⏸️ Ctrl+C detected. Stopping Flask...")
|
||
graceful_shutdown()
|
||
print("👋 Exiting.")
|
||
except Exception as e:
|
||
print(f"⚠️ Error starting system tray: {e}")
|
||
# Keep Flask running without tray
|
||
try:
|
||
while flask_thread.is_alive():
|
||
flask_thread.join(timeout=1.0)
|
||
except KeyboardInterrupt:
|
||
print("\n⏸️ Ctrl+C detected. Stopping Flask...")
|
||
graceful_shutdown()
|
||
print("👋 Exiting.")
|
||
|
||
except Exception as e:
|
||
print(f"💥 Error with threaded execution: {e}")
|
||
# Fallback to original single-threaded mode
|
||
retry_count = 0
|
||
while retry_count < max_retries:
|
||
try:
|
||
print(
|
||
"🚀 Starting Flask server for PLC S7-315 Streamer (fallback mode)"
|
||
)
|
||
print("📊 Web interface available at: http://localhost:5050")
|
||
print(
|
||
"🔧 Configure your PLC and variables through the web interface"
|
||
)
|
||
|
||
# Initialize streamer
|
||
streamer = PLCDataStreamer()
|
||
|
||
# Start Flask application
|
||
app.run(
|
||
debug=False,
|
||
host="0.0.0.0",
|
||
port=5050,
|
||
use_reloader=False,
|
||
threaded=True,
|
||
)
|
||
break
|
||
|
||
except RuntimeError as e:
|
||
if "Another instance" in str(e):
|
||
print(f"❌ {e}")
|
||
print(
|
||
"💡 Tip: Stop the other instance or wait for it to finish"
|
||
)
|
||
sys.exit(1)
|
||
else:
|
||
print(f"⚠️ Runtime error: {e}")
|
||
retry_count += 1
|
||
|
||
except KeyboardInterrupt:
|
||
print("\n⏸️ Received interrupt signal...")
|
||
graceful_shutdown()
|
||
break
|
||
|
||
except Exception as e:
|
||
print(f"💥 Unexpected error: {e}")
|
||
retry_count += 1
|
||
|
||
if retry_count < max_retries:
|
||
print(f"🔄 Attempting restart ({retry_count}/{max_retries})...")
|
||
time.sleep(2)
|
||
else:
|
||
print("❌ Maximum retries reached. Exiting...")
|
||
graceful_shutdown()
|
||
sys.exit(1)
|
||
else:
|
||
# Original mode without system tray (when pystray is not available)
|
||
print("⚠️ System tray not available. Running in console mode.")
|
||
while retry_count < max_retries:
|
||
try:
|
||
print("🚀 Starting Flask server for PLC S7-315 Streamer")
|
||
print("📊 Web interface available at: http://localhost:5050")
|
||
print("🔧 Configure your PLC and variables through the web interface")
|
||
|
||
# Initialize streamer
|
||
streamer = PLCDataStreamer()
|
||
|
||
# Start Flask application
|
||
app.run(
|
||
debug=False,
|
||
host="0.0.0.0",
|
||
port=5050,
|
||
use_reloader=False,
|
||
threaded=True,
|
||
)
|
||
break
|
||
|
||
except RuntimeError as e:
|
||
if "Another instance" in str(e):
|
||
print(f"❌ {e}")
|
||
print("💡 Tip: Stop the other instance or wait for it to finish")
|
||
sys.exit(1)
|
||
else:
|
||
print(f"⚠️ Runtime error: {e}")
|
||
retry_count += 1
|
||
|
||
except KeyboardInterrupt:
|
||
print("\n⏸️ Received interrupt signal...")
|
||
graceful_shutdown()
|
||
break
|
||
|
||
except Exception as e:
|
||
print(f"💥 Unexpected error: {e}")
|
||
retry_count += 1
|
||
|
||
if retry_count < max_retries:
|
||
print(f"🔄 Attempting restart ({retry_count}/{max_retries})...")
|
||
time.sleep(2)
|
||
else:
|
||
print("❌ Maximum retries reached. Exiting...")
|
||
graceful_shutdown()
|
||
sys.exit(1)
|
||
|
||
print("🏁 Application finished.")
|
||
|
||
|
||
# ==============================================================================
|
||
# Symbol Management API Endpoints
|
||
# ==============================================================================
|
||
|
||
|
||
@app.route("/api/utils/browse-file", methods=["POST"])
|
||
def browse_file():
|
||
"""Open file dialog to browse for ASC symbol files."""
|
||
try:
|
||
if not TKINTER_AVAILABLE:
|
||
return (
|
||
jsonify(
|
||
{
|
||
"success": False,
|
||
"error": "File browser not available. Please enter the file path manually.",
|
||
}
|
||
),
|
||
400,
|
||
)
|
||
|
||
data = request.get_json()
|
||
title = data.get("title", "Select File")
|
||
filetypes = data.get("filetypes", [["All Files", "*.*"]])
|
||
|
||
# Create a temporary tkinter root window
|
||
root = tk.Tk()
|
||
root.withdraw() # Hide the root window
|
||
root.attributes("-topmost", True) # Bring to front
|
||
|
||
# Open file dialog
|
||
file_path = filedialog.askopenfilename(title=title, filetypes=filetypes)
|
||
|
||
root.destroy() # Clean up
|
||
|
||
if file_path:
|
||
return jsonify({"success": True, "file_path": file_path})
|
||
else:
|
||
return jsonify({"success": True, "cancelled": True})
|
||
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/symbols/load", methods=["POST"])
|
||
def load_symbols():
|
||
"""Load symbols from ASC file and save to JSON."""
|
||
try:
|
||
data = request.get_json()
|
||
asc_file_path = data.get("asc_file_path")
|
||
|
||
if not asc_file_path:
|
||
return (
|
||
jsonify({"success": False, "error": "ASC file path is required"}),
|
||
400,
|
||
)
|
||
|
||
if not os.path.exists(asc_file_path):
|
||
return (
|
||
jsonify(
|
||
{"success": False, "error": f"ASC file not found: {asc_file_path}"}
|
||
),
|
||
404,
|
||
)
|
||
|
||
# Initialize symbol loader with optional logger
|
||
logger = None
|
||
try:
|
||
logger = (
|
||
streamer.event_logger if "streamer" in globals() and streamer else None
|
||
)
|
||
        except Exception:
pass # Use None logger if streamer is not available
|
||
|
||
symbol_loader = SymbolLoader(logger)
|
||
|
||
# Load symbols and save to JSON
|
||
json_output_path = project_path("config", "data", "plc_symbols.json")
|
||
|
||
# Ensure the directory exists
|
||
os.makedirs(os.path.dirname(json_output_path), exist_ok=True)
|
||
|
||
symbols_count = symbol_loader.load_asc_and_save_json(
|
||
asc_file_path, json_output_path
|
||
)
|
||
|
||
# Log the success
|
||
if logger:
|
||
logger.log_event(
|
||
"info",
|
||
"symbols_loaded",
|
||
f"Loaded {symbols_count} symbols from {asc_file_path}",
|
||
)
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"symbols_count": symbols_count,
|
||
"json_path": json_output_path,
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
error_msg = f"Error loading symbols: {str(e)}"
|
||
print(f"DEBUG - Symbol loading error: {error_msg}") # Debug print
|
||
print(f"DEBUG - Exception type: {type(e).__name__}")
|
||
|
||
# Print full stack trace
|
||
import traceback
|
||
|
||
print("DEBUG - Full stack trace:")
|
||
traceback.print_exc()
|
||
|
||
# Log the error
|
||
if "streamer" in globals() and streamer and streamer.event_logger:
|
||
streamer.event_logger.log_event("error", "symbols_load_failed", error_msg)
|
||
|
||
return jsonify({"success": False, "error": error_msg}), 500
|
||
|
||
|
||
@app.route("/api/symbols", methods=["GET"])
|
||
def get_symbols():
|
||
"""Get loaded symbols from JSON file."""
|
||
try:
|
||
json_path = project_path("config", "data", "plc_symbols.json")
|
||
|
||
if not os.path.exists(json_path):
|
||
return jsonify({"success": True, "symbols": [], "total_count": 0})
|
||
|
||
with open(json_path, "r", encoding="utf-8") as file:
|
||
symbols_data = json.load(file)
|
||
|
||
return jsonify({"success": True, **symbols_data})
|
||
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/symbols/search", methods=["POST"])
|
||
def search_symbols():
|
||
"""Search symbols by name or description."""
|
||
try:
|
||
data = request.get_json()
|
||
query = data.get("query", "").lower()
|
||
limit = data.get("limit", 50)
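        # Illustrative request body: {"query": "motor", "limit": 50}
        # ("motor" is an example search term)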
|
||
|
||
json_path = project_path("config", "data", "plc_symbols.json")
|
||
|
||
if not os.path.exists(json_path):
|
||
return jsonify({"success": True, "symbols": [], "total_count": 0})
|
||
|
||
with open(json_path, "r", encoding="utf-8") as file:
|
||
symbols_data = json.load(file)
|
||
|
||
all_symbols = symbols_data.get("symbols", [])
|
||
|
||
if not query:
|
||
# Return first N symbols if no query
|
||
filtered_symbols = all_symbols[:limit]
|
||
else:
|
||
# Search in name and description
|
||
filtered_symbols = []
|
||
for symbol in all_symbols:
|
||
if (
|
||
query in symbol.get("name", "").lower()
|
||
or query in symbol.get("description", "").lower()
|
||
):
|
||
filtered_symbols.append(symbol)
|
||
|
||
if len(filtered_symbols) >= limit:
|
||
break
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"symbols": filtered_symbols,
|
||
"total_count": len(filtered_symbols),
|
||
"query": query,
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/symbols/process-variables", methods=["POST"])
|
||
def process_symbol_variables():
|
||
"""Process dataset variables, expanding symbol-based ones."""
|
||
try:
|
||
print(f"[DEBUG] process_symbol_variables called")
|
||
|
||
data = request.get_json()
|
||
print(f"[DEBUG] Request data: {data}")
|
||
|
||
variables = data.get("variables", [])
|
||
print(f"[DEBUG] Variables to process: {len(variables)}")
|
||
|
||
if not variables:
|
||
print(f"[DEBUG] No variables provided")
|
||
return (
|
||
jsonify({"success": False, "error": "Variables array is required"}),
|
||
400,
|
||
)
|
||
|
||
# Initialize symbol processor with optional logger
|
||
logger = None
|
||
try:
|
||
logger = (
|
||
streamer.event_logger if "streamer" in globals() and streamer else None
|
||
)
|
||
except Exception as e:
|
||
print(f"[DEBUG] Logger setup failed: {e}")
|
||
pass # Use None logger if streamer is not available
|
||
|
||
print(f"[DEBUG] Creating SymbolProcessor with logger: {logger is not None}")
|
||
symbol_processor = SymbolProcessor(logger)
|
||
|
||
# Get symbols path
|
||
symbols_path = project_path("config", "data", "plc_symbols.json")
|
||
print(f"[DEBUG] Symbols path: {symbols_path}")
|
||
print(f"[DEBUG] Symbols file exists: {os.path.exists(symbols_path)}")
|
||
|
||
if not os.path.exists(symbols_path):
|
||
print(f"[DEBUG] Symbols file not found")
|
||
return (
|
||
jsonify(
|
||
{
|
||
"success": False,
|
||
"error": "No symbols file found. Please load an ASC file first.",
|
||
}
|
||
),
|
||
404,
|
||
)
|
||
|
||
# Process variables
|
||
print(f"[DEBUG] Processing variables...")
|
||
processed_variables = symbol_processor.process_dataset_variables(
|
||
variables, symbols_path
|
||
)
|
||
print(f"[DEBUG] Processed {len(processed_variables)} variables")
|
||
|
||
# Validate the processed variables
|
||
print(f"[DEBUG] Validating variables...")
|
||
validation = symbol_processor.validate_symbol_variables(variables, symbols_path)
|
||
print(f"[DEBUG] Validation result: {validation}")
|
||
|
||
result = {
|
||
"success": True,
|
||
"processed_variables": processed_variables,
|
||
"validation": validation,
|
||
}
|
||
print(
|
||
f"[DEBUG] Returning result with {len(processed_variables)} processed variables"
|
||
)
|
||
return jsonify(result)
|
||
|
||
except Exception as e:
|
||
print(f"[ERROR] Exception in process_symbol_variables: {str(e)}")
|
||
print(f"[ERROR] Exception type: {type(e)}")
|
||
import traceback
|
||
|
||
print(f"[ERROR] Traceback: {traceback.format_exc()}")
|
||
return jsonify({"success": False, "error": str(e)}), 500
|
||
|
||
|
||
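# Illustrative sketch (comments only, not executed): the endpoint above takes a
# "variables" array, expands symbol-based entries via SymbolProcessor, and returns both
# the expanded list and a validation report. The exact fields inside each variable entry
# depend on SymbolProcessor; the payload below is a hypothetical example for shape only:
#
#   import requests
#   resp = requests.post(
#       "http://localhost:5050/api/symbols/process-variables",
#       json={"variables": [{"symbol": "Motor_Speed"}]},  # hypothetical entry
#   )
#   # -> {"success": True, "processed_variables": [...], "validation": {...}}

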
# CSV File Browser Endpoints
@app.route("/api/csv/files", methods=["GET"])
def get_csv_files():
    """Get structured list of CSV files organized by date and dataset"""
    try:
        records_dir = get_records_directory()
        if not os.path.exists(records_dir):
            return jsonify({"files": [], "tree": []})

        file_tree = []
        files_flat = []

        # Scan records directory
        for date_dir in os.listdir(records_dir):
            date_path = os.path.join(records_dir, date_dir)
            if not os.path.isdir(date_path):
                continue

            date_node = {
                "id": f"date_{date_dir}",
                "name": f"📅 {date_dir}",
                "value": date_dir,
                "type": "date",
                "children": [],
            }

            # Group files by dataset
            datasets = {}
            for filename in os.listdir(date_path):
                if not filename.endswith(".csv"):
                    continue

                file_path = os.path.join(date_path, filename)

                # Extract the dataset name heuristically: if the last "_" segment is a
                # number (e.g. "Fast_01.csv"), keep only the first segment ("Fast");
                # otherwise drop just the last segment (e.g. "Slow_Loop_Data.csv" -> "Slow_Loop")
                dataset_name = filename.split("_")[0]
                if len(filename.split("_")) > 1:
                    parts = filename.split("_")
                    if not parts[-1].replace(".csv", "").isdigit():
                        dataset_name = "_".join(parts[:-1])

                # Get file info
                try:
                    stat = os.stat(file_path)
                    file_size = stat.st_size
                    file_mtime = datetime.fromtimestamp(stat.st_mtime)

                    # Try to get CSV info (first 3 columns and row count)
                    csv_info = get_csv_file_info(file_path)

                    file_info = {
                        "id": f"file_{date_dir}_{filename}",
                        "name": f"📊 {filename}",
                        "value": filename,
                        "type": "file",
                        "path": file_path,
                        "date": date_dir,
                        "dataset": dataset_name,
                        "size": file_size,
                        "size_human": format_file_size(file_size),
                        "modified": file_mtime.isoformat(),
                        "modified_human": file_mtime.strftime("%H:%M:%S"),
                        **csv_info,
                    }

                    files_flat.append(file_info)

                    if dataset_name not in datasets:
                        datasets[dataset_name] = {
                            "id": f"dataset_{date_dir}_{dataset_name}",
                            "name": f"📊 {dataset_name}",
                            "value": dataset_name,
                            "type": "dataset",
                            "children": [],
                        }

                    datasets[dataset_name]["children"].append(file_info)

                except Exception as e:
                    print(f"Error processing file {filename}: {e}")
                    continue

            # Add datasets to date node
            date_node["children"] = list(datasets.values())
            if date_node["children"]:  # Only add dates that have files
                file_tree.append(date_node)

        # Sort by date (newest first)
        file_tree.sort(key=lambda x: x["value"], reverse=True)

        return jsonify(
            {"files": files_flat, "tree": file_tree, "total_files": len(files_flat)}
        )

    except Exception as e:
        return jsonify({"error": str(e), "files": [], "tree": []}), 500


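# For reference, the "tree" built above nests date -> dataset -> file nodes. A minimal
# example with illustrative (not real) values, trimmed with "..." for brevity:
#
#   [
#     {"id": "date_2024-01-15", "name": "📅 2024-01-15", "type": "date",
#      "children": [
#        {"id": "dataset_2024-01-15_Fast", "name": "📊 Fast", "type": "dataset",
#         "children": [
#           {"id": "file_2024-01-15_Fast_01.csv", "type": "file",
#            "dataset": "Fast", "size_human": "1.2 MB", ...}
#         ]}
#      ]}
#   ]

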
def get_csv_file_info(file_path):
    """Get CSV file information: first 3 columns and row count"""
    try:
        import csv

        with open(file_path, "r", encoding="utf-8") as f:
            reader = csv.reader(f)
            header = next(reader, [])

            # Count data rows (reads the whole file once; the header row is excluded)
            row_count = sum(1 for _ in reader)

            # Get first 3 column names
            first_columns = header[:3] if header else []

            return {
                "columns": first_columns,
                "column_count": len(header),
                "row_count": row_count,
                "preview": ", ".join(first_columns[:3]),
            }
    except Exception:
        return {"columns": [], "column_count": 0, "row_count": 0, "preview": "Unknown"}


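# Example of the metadata returned by get_csv_file_info (illustrative values): for a CSV
# whose header row is "timestamp,temperature,pressure,flow", the helper returns
#   {"columns": ["timestamp", "temperature", "pressure"],
#    "column_count": 4,
#    "row_count": <number of data rows after the header>,
#    "preview": "timestamp, temperature, pressure"}

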
def format_file_size(size_bytes):
    """Format file size in human readable format"""
    if size_bytes == 0:
        return "0 B"
    size_names = ["B", "KB", "MB", "GB"]
    import math

    i = int(math.floor(math.log(size_bytes, 1024)))
    i = min(i, len(size_names) - 1)  # Clamp so sizes >= 1 TB still format as GB
    p = math.pow(1024, i)
    s = round(size_bytes / p, 2)
    return f"{s} {size_names[i]}"


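# Quick reference examples for format_file_size (values worked out by hand):
#   format_file_size(0)         -> "0 B"
#   format_file_size(1536)      -> "1.5 KB"
#   format_file_size(5_242_880) -> "5.0 MB"

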
@app.route("/api/plotjuggler/launch", methods=["POST"])
|
||
def launch_plotjuggler():
|
||
"""Launch PlotJuggler with selected CSV files"""
|
||
try:
|
||
data = request.get_json()
|
||
file_paths = data.get("files", [])
|
||
|
||
if not file_paths:
|
||
return jsonify({"error": "No files provided"}), 400
|
||
|
||
# Get PlotJuggler path from system state
|
||
plotjuggler_path = get_plotjuggler_path()
|
||
if not plotjuggler_path:
|
||
return jsonify({"error": "PlotJuggler not found"}), 404
|
||
|
||
# Launch PlotJuggler with files
|
||
import subprocess
|
||
|
||
if len(file_paths) == 1:
|
||
# Single file - use --datafile for compatibility
|
||
cmd = [plotjuggler_path, "--nosplash", "--datafile", file_paths[0]]
|
||
else:
|
||
# Multiple files - use single instance with multiple -d flags
|
||
cmd = [plotjuggler_path, "-n"] # -n for no splash
|
||
for file_path in file_paths:
|
||
cmd.extend(["-d", file_path])
|
||
|
||
# Execute command
|
||
subprocess.Popen(cmd, shell=True)
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"message": f"PlotJuggler launched with {len(file_paths)} file(s)",
|
||
"command": " ".join([f'"{arg}"' if " " in arg else arg for arg in cmd]),
|
||
"files_count": len(file_paths),
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
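# For reference, the command lines assembled above look roughly like this
# (paths illustrative, flags as used in launch_plotjuggler above):
#   Single file:    plotjuggler.exe --nosplash --datafile C:\...\Fast_01.csv
#   Multiple files: plotjuggler.exe -n -d file1.csv -d file2.csv
# If the local PlotJuggler build exposes different CLI options, its --help output
# takes precedence over this sketch.

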
@app.route("/api/plotjuggler/launch-streamer", methods=["POST"])
|
||
def launch_plotjuggler_streamer():
|
||
"""Launch PlotJuggler with UDP streamer configured"""
|
||
try:
|
||
# Get PlotJuggler path from system state
|
||
plotjuggler_path = get_plotjuggler_path()
|
||
if not plotjuggler_path:
|
||
return jsonify({"error": "PlotJuggler not found"}), 404
|
||
|
||
# Launch PlotJuggler with UDP streamer parameters
|
||
import subprocess
|
||
|
||
cmd = [
|
||
plotjuggler_path,
|
||
"-n", # no recent files menu
|
||
"--start_streamer",
|
||
"UDP streamer",
|
||
]
|
||
|
||
subprocess.Popen(cmd, shell=True)
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"message": "PlotJuggler launched with UDP streamer",
|
||
"command": " ".join(cmd),
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plotjuggler/path", methods=["GET"])
|
||
def get_plotjuggler_path_endpoint():
|
||
"""Get PlotJuggler executable path"""
|
||
try:
|
||
path = get_plotjuggler_path()
|
||
return jsonify({"path": path, "found": path is not None})
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
@app.route("/api/plotjuggler/path", methods=["POST"])
|
||
def set_plotjuggler_path():
|
||
"""Set PlotJuggler executable path"""
|
||
try:
|
||
data = request.get_json()
|
||
path = data.get("path", "")
|
||
|
||
if not path or not os.path.exists(path):
|
||
return jsonify({"error": "Invalid path provided"}), 400
|
||
|
||
# Save to system state
|
||
save_plotjuggler_path(path)
|
||
|
||
return jsonify(
|
||
{"success": True, "message": "PlotJuggler path saved", "path": path}
|
||
)
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
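# Illustrative usage of the two /api/plotjuggler/path endpoints above (comments only,
# not executed; the executable path shown is just one of the search locations below):
#
#   import requests
#   requests.get("http://localhost:5050/api/plotjuggler/path")
#   # -> {"path": "C:\\Program Files\\PlotJuggler\\plotjuggler.exe", "found": True}
#
#   requests.post(
#       "http://localhost:5050/api/plotjuggler/path",
#       json={"path": r"C:\Program Files\PlotJuggler\plotjuggler.exe"},
#   )
#   # -> {"success": True, "message": "PlotJuggler path saved", "path": "..."}

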
def get_plotjuggler_path():
    """Get PlotJuggler executable path, search if not found"""
    try:
        # Load from system state
        state_file = external_path("system_state.json")
        if os.path.exists(state_file):
            with open(state_file, "r") as f:
                state = json.load(f)
                saved_path = state.get("plotjuggler_path")
                if saved_path and os.path.exists(saved_path):
                    return saved_path

        # Search for PlotJuggler in common installation paths
        search_paths = [
            # Standard installation paths for different Windows versions
            r"C:\Program Files\PlotJuggler\plotjuggler.exe",
            r"C:\Program Files (x86)\PlotJuggler\plotjuggler.exe",
            # Alternative installation locations
            r"C:\PlotJuggler\plotjuggler.exe",
            r"C:\Tools\PlotJuggler\plotjuggler.exe",
            # User-specific installations
            os.path.expanduser(r"~\AppData\Local\PlotJuggler\plotjuggler.exe"),
            os.path.expanduser(r"~\PlotJuggler\plotjuggler.exe"),
            # Portable installations in common locations
            r"D:\PlotJuggler\plotjuggler.exe",
            r"E:\PlotJuggler\plotjuggler.exe",
        ]

        # Also search in PATH environment variable
        import shutil

        path_executable = shutil.which("plotjuggler")
        if path_executable:
            search_paths.insert(0, path_executable)  # Give priority to PATH

        # Search through all paths
        backend_logger.info(
            f"Searching for PlotJuggler in {len(search_paths)} locations..."
        )
        for i, path in enumerate(search_paths):
            backend_logger.debug(f"  [{i+1}] Checking: {path}")
            if os.path.exists(path):
                backend_logger.info(f"✅ PlotJuggler found at: {path}")
                save_plotjuggler_path(path)
                return path
            else:
                backend_logger.debug(f"  ❌ Not found: {path}")

        backend_logger.warning("❌ PlotJuggler not found in any standard location")
        return None

    except Exception as e:
        print(f"Error getting PlotJuggler path: {e}")
        return None


def save_plotjuggler_path(path):
    """Save PlotJuggler path to system state"""
    try:
        state_file = external_path("system_state.json")
        state = {}

        if os.path.exists(state_file):
            with open(state_file, "r") as f:
                state = json.load(f)

        state["plotjuggler_path"] = path
        state["last_update"] = datetime.now().isoformat()

        with open(state_file, "w") as f:
            json.dump(state, f, indent=4)

    except Exception as e:
        print(f"Error saving PlotJuggler path: {e}")


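# For reference, after save_plotjuggler_path() runs, system_state.json contains at least
# the two keys written above (any keys written by other parts of the application are
# preserved). Illustrative contents:
#
#   {
#       "plotjuggler_path": "C:\\Program Files\\PlotJuggler\\plotjuggler.exe",
#       "last_update": "2024-01-15T10:30:00.000000"
#   }

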
@app.route("/api/csv/open-excel", methods=["POST"])
|
||
def open_csv_in_excel():
|
||
"""Open CSV file in Excel (or default spreadsheet app)"""
|
||
try:
|
||
data = request.get_json()
|
||
file_path = data.get("file_path", "")
|
||
|
||
if not file_path or not os.path.exists(file_path):
|
||
return jsonify({"error": "File not found"}), 404
|
||
|
||
import subprocess
|
||
import platform
|
||
|
||
system = platform.system()
|
||
if system == "Windows":
|
||
# Use os.startfile on Windows
|
||
os.startfile(file_path)
|
||
elif system == "Darwin": # macOS
|
||
subprocess.Popen(["open", file_path])
|
||
else: # Linux
|
||
subprocess.Popen(["xdg-open", file_path])
|
||
|
||
return jsonify(
|
||
{
|
||
"success": True,
|
||
"message": f"Opened {os.path.basename(file_path)} in default application",
|
||
}
|
||
)
|
||
|
||
except Exception as e:
|
||
return jsonify({"error": str(e)}), 500
|
||
|
||
|
||
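# Illustrative call for the endpoint above (comments only, not executed). On Windows the
# file opens via os.startfile using the default CSV association (typically Excel);
# macOS and Linux fall back to "open" / "xdg-open" respectively. The path below is a
# hypothetical example:
#
#   import requests
#   requests.post(
#       "http://localhost:5050/api/csv/open-excel",
#       json={"file_path": r"C:\records\2024-01-15\Fast_01.csv"},  # hypothetical path
#   )

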
if __name__ == "__main__":
    print("🚀 Starting PLC S7-315 Streamer & Logger...")
    print(f"🐍 Process PID: {os.getpid()}")

    # 🔒 ROBUST INSTANCE CHECK - HTTP + PID based verification
    print("=" * 60)
    can_proceed, check_message = check_backend_instance_robust(port=5050)
    print("=" * 60)

    if not can_proceed:
        print(f"❌ Startup aborted: {check_message}")
        print(
            "💡 Tip: If you believe this is an error, check Task Manager for python.exe processes"
        )
        # input("\nPress Enter to exit...")
        sys.exit(1)

    try:
        # Initialize streamer instance
        print("🔧 Initializing PLCDataStreamer...")
        streamer = PLCDataStreamer()

        print("📊 Initializing Historical Cache...")
        # Pass the backend_logger to HistoricalDataCache
        historical_cache = HistoricalDataCache(backend_logger)

        print("✅ Backend initialization complete")
        print("🌐 Starting Flask server on port 5050...")
        main()

    except RuntimeError as e:
        if "Another instance" in str(e):
            print("🚫 Another instance of the application is already running.")
            print("💡 Please stop the other instance first or wait for it to finish.")
            print(f"📋 Error details: {e}")
            print("\n🔍 You can check Task Manager for 'python.exe' processes")
            print(
                "🔍 Or wait a few seconds and try again if the other instance is closing"
            )
            input("\nPress Enter to exit...")
        else:
            print(f"💥 Runtime error during initialization: {e}")
            input("Press Enter to exit...")
        sys.exit(1)

    except Exception as e:
        print(f"💥 Critical error during initialization: {e}")
        import traceback

        traceback.print_exc()
        input("Press Enter to exit...")
        sys.exit(1)