Enhance dataset variable handling and expand symbolic variables
- Updated dataset_variables.json to include new variables and adjust configurations.
- Modified plot_variables.json to add visualization settings for new variables.
- Enhanced ConfigManager to support expansion of symbolic variables, improving variable management.
- Updated PLCClient to handle additional memory types for variable reading.
- Refactored VariableSelectorWidget to utilize expanded dataset variables for better selection.
- Added new API endpoint to retrieve expanded dataset variables.
- Adjusted system_state.json to reflect changes in active datasets and last update timestamp.
This commit is contained in:
parent
31cb5cc515
commit
fd8efecae4
File diff suppressed because it is too large
Load Diff
|
@ -4,8 +4,8 @@
|
|||
"dataset_id": "DAR",
|
||||
"variables": [
|
||||
{
|
||||
"configType": "manual",
|
||||
"area": "db",
|
||||
"configType": "manual",
|
||||
"db": 1011,
|
||||
"name": "UR29_Brix",
|
||||
"offset": 1322,
|
||||
|
@ -13,8 +13,8 @@
|
|||
"type": "real"
|
||||
},
|
||||
{
|
||||
"configType": "manual",
|
||||
"area": "db",
|
||||
"configType": "manual",
|
||||
"db": 1011,
|
||||
"name": "UR29_ma",
|
||||
"offset": 1296,
|
||||
|
@ -29,9 +29,9 @@
|
|||
{
|
||||
"configType": "symbol",
|
||||
"area": "db",
|
||||
"type": "real",
|
||||
"streaming": false,
|
||||
"symbol": "FTP302_Brix"
|
||||
"streaming": true,
|
||||
"symbol": "AUX Blink_1.0S",
|
||||
"type": "real"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -18,6 +18,13 @@
|
|||
"line_width": 2,
|
||||
"y_axis": "left",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"variable_name": "AUX Blink_1.0S",
|
||||
"color": "#3498db",
|
||||
"line_width": 2,
|
||||
"y_axis": "left",
|
||||
"enabled": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -3,6 +3,13 @@ import os
|
|||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
# Try relative imports first (when used as a package)
|
||||
from ..utils.symbol_processor import SymbolProcessor
|
||||
except ImportError:
|
||||
# Fallback to absolute imports (when run directly)
|
||||
from utils.symbol_processor import SymbolProcessor
|
||||
|
||||
|
||||
def resource_path(relative_path):
|
||||
"""Get absolute path to resource, works for dev and for PyInstaller"""
|
||||
|
@ -52,6 +59,10 @@ class ConfigManager:
|
|||
"max_days": 30, # Maximum days to keep files
|
||||
}
|
||||
|
||||
# Symbol processor for expanding symbolic variables
|
||||
self.symbol_processor = SymbolProcessor(logger)
|
||||
self.symbols_file = resource_path(os.path.join(data_dir, "plc_symbols.json"))
|
||||
|
||||
# Datasets management
|
||||
self.datasets = {} # Dictionary of dataset_id -> dataset_config
|
||||
self.active_datasets = set() # Set of active dataset IDs
|
||||
|
@ -223,8 +234,17 @@ class ConfigManager:
|
|||
streaming_variables = []
|
||||
|
||||
for var in variables_list:
|
||||
# Handle symbolic variables by expanding them first
|
||||
if var.get("configType") == "symbol":
|
||||
var = self._expand_symbolic_variable(var)
|
||||
if var is None:
|
||||
# Skip if symbol expansion failed
|
||||
continue
|
||||
|
||||
var_name = var.get("name")
|
||||
if not var_name:
|
||||
if self.logger:
|
||||
self.logger.warning(f"Skipping variable without name in dataset {dataset_id}: {var}")
|
||||
continue
|
||||
|
||||
# Build variable config (remove name since it's the key)
|
||||
|
@ -734,3 +754,47 @@ class ConfigManager:
|
|||
for dataset_id, info in self.datasets.items()
|
||||
},
|
||||
}
|
||||
|
||||
def _expand_symbolic_variable(self, var_config):
|
||||
"""
|
||||
Expand a symbolic variable configuration into manual configuration.
|
||||
|
||||
Args:
|
||||
var_config: Variable configuration with 'symbol' field
|
||||
|
||||
Returns:
|
||||
Expanded variable configuration or None if symbol not found
|
||||
"""
|
||||
try:
|
||||
if not os.path.exists(self.symbols_file):
|
||||
if self.logger:
|
||||
self.logger.warning(f"Symbols file not found: {self.symbols_file}")
|
||||
return None
|
||||
|
||||
# Load symbols data
|
||||
symbols_data = self.symbol_processor.load_symbols(self.symbols_file)
|
||||
|
||||
# Expand the symbolic variable
|
||||
expanded_var = self.symbol_processor.expand_symbol_variable(var_config, symbols_data)
|
||||
|
||||
# If no name was provided, use the symbol name
|
||||
if not expanded_var.get("name") and var_config.get("symbol"):
|
||||
expanded_var["name"] = var_config.get("symbol")
|
||||
|
||||
# Keep the configType as "symbol" for reference but make it functional
|
||||
expanded_var["configType"] = "symbol"
|
||||
|
||||
if self.logger:
|
||||
symbol_name = var_config.get("symbol")
|
||||
var_name = expanded_var.get("name", symbol_name)
|
||||
area = expanded_var.get("area", "unknown")
|
||||
offset = expanded_var.get("offset", "unknown")
|
||||
self.logger.info(f"Expanded symbolic variable '{symbol_name}' -> {var_name} ({area}:{offset})")
|
||||
|
||||
return expanded_var
|
||||
|
||||
except Exception as e:
|
||||
if self.logger:
|
||||
symbol_name = var_config.get("symbol", "unknown")
|
||||
self.logger.error(f"Failed to expand symbolic variable '{symbol_name}': {e}")
|
||||
return None
|
||||
|
|
|
@ -275,16 +275,20 @@ class PLCClient:
|
|||
var_type,
|
||||
bit,
|
||||
)
|
||||
elif area_type in ["mw", "m"]:
|
||||
elif area_type in ["mw", "m", "md", "mb"]: # Memory Word, Memory, Memory Double, Memory Byte
|
||||
result = self._read_memory_variable(offset, var_type)
|
||||
elif area_type in [
|
||||
"pew",
|
||||
"pe",
|
||||
"ped", # Process Input Double word (REAL)
|
||||
"peb", # Process Input Byte
|
||||
]:
|
||||
result = self._read_input_variable(offset, var_type)
|
||||
elif area_type in [
|
||||
"paw",
|
||||
"pa",
|
||||
"pad", # Process Output Double word (REAL)
|
||||
"pab", # Process Output Byte
|
||||
]:
|
||||
result = self._read_output_variable(offset, var_type)
|
||||
elif area_type == "e":
|
||||
|
|
|
@ -4,7 +4,7 @@ import {
|
|||
Text, Badge, Box, Icon, Input, useColorModeValue, Spinner, IconButton, Tooltip
|
||||
} from '@chakra-ui/react'
|
||||
import { SearchIcon, RepeatIcon } from '@chakra-ui/icons'
|
||||
import { readConfig } from '../../services/api.js'
|
||||
import { readConfig, readExpandedDatasetVariables } from '../../services/api.js'
|
||||
import { useVariableContext } from '../../contexts/VariableContext.jsx'
|
||||
|
||||
// Widget for selecting existing dataset variables with filtering and search
|
||||
|
@ -42,8 +42,8 @@ export function VariableSelectorWidget(props) {
|
|||
const loadDatasetVariables = useCallback(async () => {
|
||||
try {
|
||||
setLoading(true)
|
||||
const response = await readConfig('dataset-variables')
|
||||
// Handle the array-based structure: { variables: [{dataset_id, variables: [...]}] }
|
||||
const response = await readExpandedDatasetVariables()
|
||||
// Handle the array-based structure: { variables: [{dataset_id, variables: {...}}] }
|
||||
const datasetVariablesArray = response?.variables || []
|
||||
|
||||
// Convert array format to object format for easier processing
|
||||
|
@ -51,14 +51,8 @@ export function VariableSelectorWidget(props) {
|
|||
datasetVariablesArray.forEach(item => {
|
||||
if (item.dataset_id && item.variables) {
|
||||
datasetVariablesObj[item.dataset_id] = {
|
||||
variables: {}
|
||||
variables: item.variables // Already in object format from expanded endpoint
|
||||
}
|
||||
// Convert variables array to object with variable name as key
|
||||
item.variables.forEach(variable => {
|
||||
if (variable.name) {
|
||||
datasetVariablesObj[item.dataset_id].variables[variable.name] = variable
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
|
|
|
@ -83,6 +83,13 @@ export async function readConfig(configId) {
|
|||
return response.data || response
|
||||
}
|
||||
|
||||
export async function readExpandedDatasetVariables() {
  // Dataset variables endpoint with symbolic entries expanded server-side.
  const url = `${BASE_URL}/api/config/dataset-variables/expanded`
  const raw = await fetch(url, { headers: { 'Accept': 'application/json' } })
  const body = await toJsonOrThrow(raw)
  // Unwrap the { success: true, data: { variables: [...] } } envelope when present.
  return body.data || body
}
|
||||
|
||||
export async function writeConfig(configId, data) {
|
||||
const res = await fetch(`${BASE_URL}/api/config/${encodeURIComponent(configId)}`, {
|
||||
method: 'PUT',
|
||||
|
|
25
main.py
25
main.py
|
@ -277,6 +277,31 @@ def reload_config(config_id):
|
|||
return jsonify({"success": False, "error": str(e)}), 500
|
||||
|
||||
|
||||
@app.route("/api/config/dataset-variables/expanded", methods=["GET"])
def get_expanded_dataset_variables():
    """Get dataset variables with symbolic variables expanded."""
    error_response = check_streamer_initialized()
    if error_response:
        return error_response

    try:
        # Reshape the config manager's datasets into the API's
        # array-based payload: [{dataset_id, variables}, ...].
        payload = {"variables": []}

        manager = streamer.config_manager
        if hasattr(manager, 'datasets') and manager.datasets:
            for ds_id, ds_config in manager.datasets.items():
                ds_vars = ds_config['variables'] if 'variables' in ds_config else None
                if ds_vars:
                    payload["variables"].append({
                        "dataset_id": ds_id,
                        "variables": ds_vars,
                    })

        return jsonify({"success": True, "data": payload})

    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
|
||||
|
||||
|
||||
# ==============================
|
||||
# Operational API (PLC Control, Streaming, etc.)
|
||||
# ==============================
|
||||
|
|
|
@ -4,10 +4,10 @@
|
|||
"should_stream": true,
|
||||
"active_datasets": [
|
||||
"Fast",
|
||||
"Test",
|
||||
"DAR"
|
||||
"DAR",
|
||||
"Test"
|
||||
]
|
||||
},
|
||||
"auto_recovery_enabled": true,
|
||||
"last_update": "2025-08-14T17:32:09.169720"
|
||||
"last_update": "2025-08-14T18:26:34.411771"
|
||||
}
|
Loading…
Reference in New Issue