feat: Enhance logging and error handling in symbol processing, update system state and dataset variables, and add symbol data expansion functionality

This commit is contained in:
Miguel 2025-08-15 21:05:58 +02:00
parent e7cee49b1e
commit 0f2b9b8fb4
6 changed files with 487 additions and 27 deletions

View File

@ -7884,8 +7884,262 @@
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-15T20:48:58.534574",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-15T20:48:58.585203",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: DAR",
"details": {
"dataset_id": "DAR",
"variables_count": 2,
"streaming_count": 2,
"prefix": "gateway_phoenix"
}
},
{
"timestamp": "2025-08-15T20:48:58.598180",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: Fast",
"details": {
"dataset_id": "Fast",
"variables_count": 2,
"streaming_count": 2,
"prefix": "fast"
}
},
{
"timestamp": "2025-08-15T20:48:58.614081",
"level": "info",
"event_type": "csv_recording_started",
"message": "CSV recording started: 2 datasets activated",
"details": {
"activated_datasets": 2,
"total_datasets": 3
}
},
{
"timestamp": "2025-08-15T20:48:58.626508",
"level": "info",
"event_type": "udp_streaming_started",
"message": "UDP streaming to PlotJuggler started",
"details": {
"udp_host": "127.0.0.1",
"udp_port": 9870,
"datasets_available": 3
}
},
{
"timestamp": "2025-08-15T20:50:32.769698",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-15T20:50:32.820757",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: DAR",
"details": {
"dataset_id": "DAR",
"variables_count": 2,
"streaming_count": 2,
"prefix": "gateway_phoenix"
}
},
{
"timestamp": "2025-08-15T20:50:32.831299",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: Fast",
"details": {
"dataset_id": "Fast",
"variables_count": 2,
"streaming_count": 2,
"prefix": "fast"
}
},
{
"timestamp": "2025-08-15T20:50:32.843238",
"level": "info",
"event_type": "csv_recording_started",
"message": "CSV recording started: 2 datasets activated",
"details": {
"activated_datasets": 2,
"total_datasets": 3
}
},
{
"timestamp": "2025-08-15T20:50:32.854102",
"level": "info",
"event_type": "udp_streaming_started",
"message": "UDP streaming to PlotJuggler started",
"details": {
"udp_host": "127.0.0.1",
"udp_port": 9870,
"datasets_available": 3
}
},
{
"timestamp": "2025-08-15T20:52:04.048074",
"level": "info",
"event_type": "csv_recording_stopped",
"message": "CSV recording stopped (dataset threads continue for UDP streaming)",
"details": {}
},
{
"timestamp": "2025-08-15T20:52:04.060239",
"level": "info",
"event_type": "udp_streaming_stopped",
"message": "UDP streaming to PlotJuggler stopped (CSV recording continues)",
"details": {}
},
{
"timestamp": "2025-08-15T20:52:04.617441",
"level": "info",
"event_type": "dataset_deactivated",
"message": "Dataset deactivated: Fast",
"details": {
"dataset_id": "Fast"
}
},
{
"timestamp": "2025-08-15T20:52:04.627037",
"level": "info",
"event_type": "dataset_deactivated",
"message": "Dataset deactivated: test",
"details": {
"dataset_id": "Test"
}
},
{
"timestamp": "2025-08-15T20:52:04.872413",
"level": "info",
"event_type": "dataset_deactivated",
"message": "Dataset deactivated: DAR",
"details": {
"dataset_id": "DAR"
}
},
{
"timestamp": "2025-08-15T20:52:04.882404",
"level": "info",
"event_type": "plc_disconnection",
"message": "Disconnected from PLC 10.1.33.11 (stopped recording and streaming)",
"details": {}
},
{
"timestamp": "2025-08-15T20:53:50.987335",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-15T20:54:20.001933",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-15T20:55:53.633692",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-15T20:56:02.602017",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T20:56:41.732947",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T20:59:14.451792",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T20:59:24.125019",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:00:09.914702",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:00:34.480713",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:00:54.044314",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:02:01.787851",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:02:09.080032",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:03:39.916462",
"level": "info",
"event_type": "symbols_loaded",
"message": "Loaded 2077 symbols",
"details": {}
},
{
"timestamp": "2025-08-15T21:03:44.900837",
"level": "info",
"event_type": "config_reload",
"message": "Dataset configuration reloaded from files with CSV header validation",
"details": {
"datasets_count": 3,
"active_datasets_count": 3,
"csv_recording_active": false
}
}
],
"last_updated": "2025-08-15T20:37:44.200561",
"total_entries": 645
"last_updated": "2025-08-15T21:03:44.900837",
"total_entries": 675
}

View File

@ -34,11 +34,13 @@
"type": "real"
},
{
"configType": "symbol",
"area": "db",
"configType": "manual",
"area": "m",
"streaming": true,
"symbol": "AUX Blink_1.6S",
"type": "real"
"type": "bool",
"name": "AUX Blink_1.6S",
"offset": 0,
"bit": 6
}
]
}

View File

@ -965,6 +965,161 @@ function DatasetManager() {
}
}
// Function to expand symbol data using backend API
// Expand a symbol-based variable into a manual address configuration by
// asking the backend to resolve the symbol against the loaded PLC symbol
// table (/api/symbols/process-variables).
//
// Params:
//   symbolName      - PLC symbol name to resolve (e.g. "AUX Blink_1.6S").
//   currentVariable - existing variable object; only `name` and `streaming`
//                     are read, both optional.
// Returns: a manual-config object { name, area, offset, type, bit,
//   streaming, [db] } — `db` is included only when area is "db" (or when the
//   backend provides one). Never throws: any backend or network failure
//   falls back to safe DB-area defaults.
const expandSymbolToManualConfig = async (symbolName, currentVariable = {}) => {
    // Single fallback builder — the original duplicated this object verbatim
    // in both the backend-failure and the network-error paths.
    const buildDefaultConfig = () => {
        const config = {
            name: currentVariable.name || symbolName,
            area: "db", // Default to DB area
            offset: 0,
            type: "real",
            bit: 0,
            streaming: currentVariable.streaming || false
        }
        // DB-area addresses need a DB number; default to 1.
        if (config.area === "db") {
            config.db = 1
        }
        return config
    }
    try {
        // Ask the backend to process a one-element variable list for this symbol.
        const response = await fetch('/api/symbols/process-variables', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                variables: [{
                    symbol: symbolName,
                    streaming: currentVariable.streaming || false,
                    configType: "symbol"
                }]
            })
        })
        const result = await response.json()
        // Robustness fix: use ?. so a success response that omits the
        // processed_variables array falls back instead of throwing.
        if (result.success && result.processed_variables?.length > 0) {
            const processedVar = result.processed_variables[0]
            // Build the configuration, only including fields that apply.
            const config = {
                name: processedVar.name || symbolName,
                area: processedVar.area || "db",
                // ?? keeps a valid offset/bit of 0 while defaulting null/undefined.
                offset: processedVar.offset ?? 0,
                type: processedVar.type || "real",
                bit: processedVar.bit ?? 0,
                streaming: currentVariable.streaming || false
            }
            // Include db when the backend supplied one; otherwise default to 1
            // only when the resolved area actually is a data block.
            if (processedVar.db !== undefined && processedVar.db !== null) {
                config.db = processedVar.db
            } else if (config.area === "db") {
                config.db = 1
            }
            return config
        }
        // Backend reported failure or returned no processed variables.
        return buildDefaultConfig()
    } catch (error) {
        console.error('Error expanding symbol:', error)
        return buildDefaultConfig()
    }
}
// Custom onChange handler that detects configType changes and auto-fills data
// Form onChange handler: when a variable's configType flips from "symbol"
// to "manual", auto-fill the manual address fields from the symbol
// definition before propagating the change; all other edits pass through
// unchanged.
const handleFormChange = async ({ formData }) => {
    const newVars = formData?.variables
    const oldVars = selectedDatasetVars?.variables
    // Guard clause: with nothing to diff against, do a plain update.
    if (!newVars || !oldVars) {
        updateSelectedDatasetVariables(formData)
        return
    }
    let expandedCount = 0
    const mergedVariables = []
    for (let i = 0; i < newVars.length; i++) {
        const current = newVars[i]
        const previous = oldVars[i]
        const switchedToManual =
            previous?.configType === "symbol" &&
            current?.configType === "manual" &&
            Boolean(previous?.symbol)
        if (!switchedToManual) {
            // Untouched or non-switching variable: pass through as-is.
            mergedVariables.push(current)
            continue
        }
        expandedCount += 1
        try {
            // Resolve the symbol via the backend and merge its address data in.
            const resolved = await expandSymbolToManualConfig(previous.symbol, previous)
            mergedVariables.push({
                ...current,
                ...resolved,
                configType: "manual", // Ensure configType is set correctly
                symbol: undefined // Clear symbol field to avoid confusion
            })
        } catch (error) {
            console.error('Error expanding symbol:', error)
            // Keep the user's raw edit when expansion fails.
            mergedVariables.push(current)
        }
    }
    if (expandedCount > 0) {
        // Tell the user their symbol data was copied into the manual fields.
        toast({
            title: '🔄 Auto-filled from symbol',
            description: 'Symbol data has been copied to manual configuration fields',
            status: 'success',
            duration: 3000
        })
    }
    updateSelectedDatasetVariables({
        ...formData,
        variables: mergedVariables
    })
}
return (
<Form
schema={singleDatasetSchema}
@ -980,7 +1135,14 @@ function DatasetManager() {
triggerVariableRefresh()
})
}}
onChange={({ formData }) => updateSelectedDatasetVariables(formData)}
onChange={({ formData }) => {
// Call the async handler
handleFormChange({ formData }).catch(error => {
console.error('Error in form change handler:', error)
// Fallback to normal update on error
updateSelectedDatasetVariables(formData)
})
}}
>
<HStack spacing={2} mt={4}>
<Button type="submit" colorScheme="blue">

36
main.py
View File

@ -2768,10 +2768,16 @@ def search_symbols():
def process_symbol_variables():
"""Process dataset variables, expanding symbol-based ones."""
try:
print(f"[DEBUG] process_symbol_variables called")
data = request.get_json()
print(f"[DEBUG] Request data: {data}")
variables = data.get("variables", [])
print(f"[DEBUG] Variables to process: {len(variables)}")
if not variables:
print(f"[DEBUG] No variables provided")
return (
jsonify({"success": False, "error": "Variables array is required"}),
400,
@ -2783,15 +2789,20 @@ def process_symbol_variables():
logger = (
streamer.event_logger if "streamer" in globals() and streamer else None
)
except:
except Exception as e:
print(f"[DEBUG] Logger setup failed: {e}")
pass # Use None logger if streamer is not available
print(f"[DEBUG] Creating SymbolProcessor with logger: {logger is not None}")
symbol_processor = SymbolProcessor(logger)
# Get symbols path
symbols_path = project_path("config", "data", "plc_symbols.json")
print(f"[DEBUG] Symbols path: {symbols_path}")
print(f"[DEBUG] Symbols file exists: {os.path.exists(symbols_path)}")
if not os.path.exists(symbols_path):
print(f"[DEBUG] Symbols file not found")
return (
jsonify(
{
@ -2803,22 +2814,33 @@ def process_symbol_variables():
)
# Process variables
print(f"[DEBUG] Processing variables...")
processed_variables = symbol_processor.process_dataset_variables(
variables, symbols_path
)
print(f"[DEBUG] Processed {len(processed_variables)} variables")
# Validate the processed variables
print(f"[DEBUG] Validating variables...")
validation = symbol_processor.validate_symbol_variables(variables, symbols_path)
print(f"[DEBUG] Validation result: {validation}")
return jsonify(
{
"success": True,
"processed_variables": processed_variables,
"validation": validation,
}
result = {
"success": True,
"processed_variables": processed_variables,
"validation": validation,
}
print(
f"[DEBUG] Returning result with {len(processed_variables)} processed variables"
)
return jsonify(result)
except Exception as e:
print(f"[ERROR] Exception in process_symbol_variables: {str(e)}")
print(f"[ERROR] Exception type: {type(e)}")
import traceback
print(f"[ERROR] Traceback: {traceback.format_exc()}")
return jsonify({"success": False, "error": str(e)}), 500

View File

@ -1,14 +1,10 @@
{
"last_state": {
"should_connect": true,
"should_stream": true,
"active_datasets": [
"DAR",
"Test",
"Fast"
]
"should_connect": false,
"should_stream": false,
"active_datasets": []
},
"auto_recovery_enabled": true,
"last_update": "2025-08-15T20:37:12.670764",
"last_update": "2025-08-15T20:58:45.862859",
"plotjuggler_path": "C:\\Program Files\\PlotJuggler\\plotjuggler.exe"
}

View File

@ -33,13 +33,29 @@ class SymbolProcessor:
self._symbols_cache_path = symbols_path
if self.logger:
self.logger.info(
f"Loaded {len(self._symbols_cache.get('symbols', []))} symbols"
)
# Check if logger is EventLogger (has log_event method) or standard logger
if hasattr(self.logger, 'log_event'):
self.logger.log_event(
'info',
'symbols_loaded',
f"Loaded {len(self._symbols_cache.get('symbols', []))} symbols"
)
else:
self.logger.info(
f"Loaded {len(self._symbols_cache.get('symbols', []))} symbols"
)
except Exception as e:
if self.logger:
self.logger.error(f"Error loading symbols: {str(e)}")
# Check if logger is EventLogger (has log_event method) or standard logger
if hasattr(self.logger, 'log_event'):
self.logger.log_event(
'error',
'symbols_load_error',
f"Error loading symbols: {str(e)}"
)
else:
self.logger.error(f"Error loading symbols: {str(e)}")
self._symbols_cache = {"symbols": [], "total_count": 0}
return self._symbols_cache
@ -81,7 +97,15 @@ class SymbolProcessor:
symbol = self.find_symbol(symbol_name, symbols_data)
if not symbol:
if self.logger:
self.logger.warning(f"Symbol '{symbol_name}' not found")
# Check if logger is EventLogger (has log_event method) or standard logger
if hasattr(self.logger, 'log_event'):
self.logger.log_event(
'warning',
'symbol_not_found',
f"Symbol '{symbol_name}' not found"
)
else:
self.logger.warning(f"Symbol '{symbol_name}' not found")
return variable_config
# Create expanded configuration