Refactor dataset and plot management to support new array format
- Updated dataset definitions to use a sampling interval of 0.5 seconds. - Changed plot definitions to reduce the time window to 25 seconds and added a new plot for "Brix". - Removed deprecated variable configurations from plot variables. - Refactored ConfigManager to load datasets and variables from new array format, eliminating legacy save methods. - Updated PLCDataStreamer and PlotManager to reflect changes in dataset and plot management, removing automatic save calls. - Enhanced ChartjsPlot component to handle variable configurations and session management more efficiently. - Improved PlotRealtimeSession to ensure backend commands are verified before applying local state changes. - Adjusted system state to reflect active datasets and connection status.
This commit is contained in:
parent
748e8d5b0e
commit
2845d71efe
|
@ -1,5 +1,11 @@
|
|||
# PLC S7-315 Streamer & Logger - AI Coding Guide
|
||||
|
||||
## Workflow
|
||||
|
||||
I'm using npm run dev, so there is no need to build React
|
||||
Also restart Flask after any modification to reset the application
|
||||
For testing, the app now runs on http://localhost:5173/app with Vite acting as a proxy
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
This is a **dual-stack industrial automation system** for Siemens S7-315 PLCs combining Python backend orchestration with React frontend controls:
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -6,7 +6,7 @@
|
|||
"id": "DAR",
|
||||
"name": "DAR",
|
||||
"prefix": "gateway_phoenix",
|
||||
"sampling_interval": 1.01
|
||||
"sampling_interval": 0.5
|
||||
},
|
||||
{
|
||||
"created": "2025-08-09T02:06:26.840011",
|
||||
|
|
|
@ -4,12 +4,20 @@
|
|||
"id": "plot_1",
|
||||
"name": "UR29",
|
||||
"session_id": "plot_1",
|
||||
"time_window": 75,
|
||||
"time_window": 25,
|
||||
"trigger_enabled": false,
|
||||
"trigger_on_true": true,
|
||||
"trigger_variable": null,
|
||||
"y_max": null,
|
||||
"y_min": null
|
||||
},
|
||||
{
|
||||
"id": "Brix",
|
||||
"name": "Brix",
|
||||
"session_id": "Brix",
|
||||
"time_window": 60,
|
||||
"trigger_enabled": false,
|
||||
"trigger_on_true": true
|
||||
}
|
||||
]
|
||||
}
|
|
@ -12,16 +12,6 @@
|
|||
"color": "#e74c3c",
|
||||
"enabled": true,
|
||||
"variable_name": "UR29_ma"
|
||||
},
|
||||
{
|
||||
"color": "#2ecc71",
|
||||
"enabled": true,
|
||||
"variable_name": "fUR29_Brix"
|
||||
},
|
||||
{
|
||||
"color": "#f39c12",
|
||||
"enabled": true,
|
||||
"variable_name": "fUR29_ma"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -173,40 +173,81 @@ class ConfigManager:
|
|||
self.logger.error(f"Error loading datasets: {e}")
|
||||
|
||||
def _load_datasets_separated(self):
|
||||
"""Load datasets from separated definition and variable files"""
|
||||
"""Load datasets from separated definition and variable files (new array format)"""
|
||||
try:
|
||||
# Load definitions
|
||||
# Load definitions (new array format: {"datasets": [array]})
|
||||
with open(self.dataset_definitions_file, "r") as f:
|
||||
definitions_data = json.load(f)
|
||||
|
||||
# Load variables
|
||||
# Load variables (new array format: {"variables": [array]})
|
||||
with open(self.dataset_variables_file, "r") as f:
|
||||
variables_data = json.load(f)
|
||||
|
||||
# Merge data back to legacy format for compatibility
|
||||
# Convert new array format to internal dictionary format for compatibility
|
||||
self.datasets = {}
|
||||
dataset_defs = definitions_data.get("datasets", {})
|
||||
dataset_vars = variables_data.get("dataset_variables", {})
|
||||
|
||||
for dataset_id, definition in dataset_defs.items():
|
||||
variables_info = dataset_vars.get(dataset_id, {})
|
||||
self.datasets[dataset_id] = {
|
||||
**definition,
|
||||
"variables": variables_info.get("variables", {}),
|
||||
"streaming_variables": variables_info.get(
|
||||
"streaming_variables", []
|
||||
),
|
||||
}
|
||||
# Process dataset definitions array
|
||||
dataset_defs_array = definitions_data.get("datasets", [])
|
||||
for definition in dataset_defs_array:
|
||||
dataset_id = definition.get("id")
|
||||
if not dataset_id:
|
||||
if self.logger:
|
||||
self.logger.warning("Skipping dataset definition without id")
|
||||
continue
|
||||
|
||||
# Store definition without the id field (since id is the key)
|
||||
dataset_def = {k: v for k, v in definition.items() if k != "id"}
|
||||
self.datasets[dataset_id] = dataset_def
|
||||
|
||||
# Process dataset variables array and merge with definitions
|
||||
dataset_vars_array = variables_data.get("variables", [])
|
||||
for variables_info in dataset_vars_array:
|
||||
dataset_id = variables_info.get("dataset_id")
|
||||
if not dataset_id:
|
||||
if self.logger:
|
||||
self.logger.warning(
|
||||
"Skipping dataset variables without dataset_id"
|
||||
)
|
||||
continue
|
||||
|
||||
if dataset_id not in self.datasets:
|
||||
if self.logger:
|
||||
self.logger.warning(
|
||||
f"Found variables for unknown dataset: {dataset_id}"
|
||||
)
|
||||
continue
|
||||
|
||||
# Convert variables array to dictionary format for internal use
|
||||
variables_list = variables_info.get("variables", [])
|
||||
variables_dict = {}
|
||||
streaming_variables = []
|
||||
|
||||
for var in variables_list:
|
||||
var_name = var.get("name")
|
||||
if not var_name:
|
||||
continue
|
||||
|
||||
# Build variable config (remove name since it's the key)
|
||||
var_config = {k: v for k, v in var.items() if k != "name"}
|
||||
variables_dict[var_name] = var_config
|
||||
|
||||
# Add to streaming list if enabled
|
||||
if var_config.get("streaming", False):
|
||||
streaming_variables.append(var_name)
|
||||
|
||||
# Add variables to dataset
|
||||
self.datasets[dataset_id]["variables"] = variables_dict
|
||||
self.datasets[dataset_id]["streaming_variables"] = streaming_variables
|
||||
|
||||
# Calculate active_datasets automatically from enabled field
|
||||
self.active_datasets = set()
|
||||
for dataset_id, definition in dataset_defs.items():
|
||||
for dataset_id, definition in self.datasets.items():
|
||||
if definition.get("enabled", False):
|
||||
self.active_datasets.add(dataset_id)
|
||||
|
||||
# current_dataset_id is optional for UI, use first available if not set
|
||||
self.current_dataset_id = definitions_data.get("current_dataset_id")
|
||||
if not self.current_dataset_id and self.datasets:
|
||||
self.current_dataset_id = None
|
||||
if self.datasets:
|
||||
self.current_dataset_id = next(iter(self.datasets.keys()))
|
||||
|
||||
if self.logger:
|
||||
|
@ -230,8 +271,7 @@ class ConfigManager:
|
|||
self.active_datasets = set(legacy_data.get("active_datasets", []))
|
||||
self.current_dataset_id = legacy_data.get("current_dataset_id")
|
||||
|
||||
# Save to new separated format
|
||||
self.save_datasets()
|
||||
# Note: Migration complete - data now managed by frontend via RJSF
|
||||
|
||||
if self.logger:
|
||||
self.logger.info(
|
||||
|
@ -244,53 +284,9 @@ class ConfigManager:
|
|||
self.logger.error(f"Error migrating legacy datasets: {e}")
|
||||
raise
|
||||
|
||||
def save_datasets(self):
|
||||
"""Save datasets configuration to separated JSON files"""
|
||||
try:
|
||||
# timestamp removed as we don't save static fields anymore
|
||||
|
||||
# Prepare definitions data - only datasets, no static fields
|
||||
definitions_data = {
|
||||
"datasets": {},
|
||||
}
|
||||
|
||||
# Prepare variables data - only variables, no static fields
|
||||
variables_data = {
|
||||
"dataset_variables": {},
|
||||
}
|
||||
|
||||
# Split datasets into definitions and variables
|
||||
for dataset_id, dataset_info in self.datasets.items():
|
||||
# Extract definition (metadata only)
|
||||
definition = {
|
||||
key: value
|
||||
for key, value in dataset_info.items()
|
||||
if key not in ["variables", "streaming_variables"]
|
||||
}
|
||||
definitions_data["datasets"][dataset_id] = definition
|
||||
|
||||
# Extract variables
|
||||
variables_data["dataset_variables"][dataset_id] = {
|
||||
"variables": dataset_info.get("variables", {}),
|
||||
"streaming_variables": dataset_info.get("streaming_variables", []),
|
||||
}
|
||||
|
||||
# Save both files
|
||||
with open(self.dataset_definitions_file, "w") as f:
|
||||
json.dump(definitions_data, f, indent=4)
|
||||
|
||||
with open(self.dataset_variables_file, "w") as f:
|
||||
json.dump(variables_data, f, indent=4)
|
||||
|
||||
if self.logger:
|
||||
self.logger.info(
|
||||
f"Datasets configuration saved to separated files: "
|
||||
f"{self.dataset_definitions_file} and {self.dataset_variables_file}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
if self.logger:
|
||||
self.logger.error(f"Error saving datasets: {e}")
|
||||
# DEPRECATED: save_datasets() method removed
|
||||
# Data is now saved directly from frontend via RJSF and API endpoints
|
||||
# Use load_datasets_separated() to reload configuration when needed
|
||||
|
||||
def sync_streaming_variables(self):
|
||||
"""Synchronize streaming variables configuration"""
|
||||
|
@ -323,9 +319,12 @@ class ConfigManager:
|
|||
)
|
||||
|
||||
if sync_needed:
|
||||
self.save_datasets()
|
||||
# Note: Configuration is now managed by frontend via RJSF
|
||||
# No automatic save needed - frontend will save when user makes changes
|
||||
if self.logger:
|
||||
self.logger.info("Streaming variables configuration synchronized")
|
||||
self.logger.info(
|
||||
"Streaming variables configuration synchronized in memory"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
if self.logger:
|
||||
|
@ -517,7 +516,7 @@ class ConfigManager:
|
|||
if not self.current_dataset_id:
|
||||
self.current_dataset_id = dataset_id
|
||||
|
||||
self.save_datasets()
|
||||
# Note: Dataset changes now saved via frontend RJSF
|
||||
return new_dataset
|
||||
|
||||
def delete_dataset(self, dataset_id: str):
|
||||
|
@ -537,7 +536,7 @@ class ConfigManager:
|
|||
next(iter(self.datasets.keys())) if self.datasets else None
|
||||
)
|
||||
|
||||
self.save_datasets()
|
||||
# Note: Dataset deletion now saved via frontend RJSF
|
||||
return dataset_info
|
||||
|
||||
def get_current_dataset(self):
|
||||
|
@ -618,7 +617,7 @@ class ConfigManager:
|
|||
if name not in self.datasets[dataset_id]["streaming_variables"]:
|
||||
self.datasets[dataset_id]["streaming_variables"].append(name)
|
||||
|
||||
self.save_datasets()
|
||||
# Note: Variable addition now saved via frontend RJSF
|
||||
return var_config
|
||||
|
||||
def remove_variable_from_dataset(self, dataset_id: str, name: str):
|
||||
|
@ -636,7 +635,7 @@ class ConfigManager:
|
|||
if name in self.datasets[dataset_id]["streaming_variables"]:
|
||||
self.datasets[dataset_id]["streaming_variables"].remove(name)
|
||||
|
||||
self.save_datasets()
|
||||
# Note: Variable removal now saved via frontend RJSF
|
||||
return var_config
|
||||
|
||||
def toggle_variable_streaming(self, dataset_id: str, name: str, enabled: bool):
|
||||
|
@ -658,7 +657,7 @@ class ConfigManager:
|
|||
if name in self.datasets[dataset_id]["streaming_variables"]:
|
||||
self.datasets[dataset_id]["streaming_variables"].remove(name)
|
||||
|
||||
self.save_datasets()
|
||||
# Note: Streaming toggle now saved via frontend RJSF
|
||||
|
||||
def activate_dataset(self, dataset_id: str):
|
||||
"""Mark a dataset as active"""
|
||||
|
@ -667,7 +666,7 @@ class ConfigManager:
|
|||
|
||||
self.datasets[dataset_id]["enabled"] = True
|
||||
self._update_active_datasets()
|
||||
self.save_datasets()
|
||||
# Note: Dataset activation now saved via frontend RJSF
|
||||
|
||||
def deactivate_dataset(self, dataset_id: str):
|
||||
"""Mark a dataset as inactive"""
|
||||
|
@ -676,7 +675,7 @@ class ConfigManager:
|
|||
|
||||
self.datasets[dataset_id]["enabled"] = False
|
||||
self._update_active_datasets()
|
||||
self.save_datasets()
|
||||
# Note: Dataset deactivation now saved via frontend RJSF
|
||||
|
||||
def _update_active_datasets(self):
|
||||
"""Update active_datasets based on enabled field of each dataset"""
|
||||
|
@ -688,14 +687,15 @@ class ConfigManager:
|
|||
def get_status(self):
|
||||
"""Get configuration status"""
|
||||
total_variables = sum(
|
||||
len(dataset["variables"]) for dataset in self.datasets.values()
|
||||
len(self.get_dataset_variables(dataset_id))
|
||||
for dataset_id in self.datasets.keys()
|
||||
)
|
||||
|
||||
# Count only variables that are in streaming_variables list AND have streaming=true
|
||||
total_streaming_vars = 0
|
||||
for dataset in self.datasets.values():
|
||||
for dataset_id, dataset in self.datasets.items():
|
||||
streaming_vars = dataset.get("streaming_variables", [])
|
||||
variables_config = dataset.get("variables", {})
|
||||
variables_config = self.get_dataset_variables(dataset_id)
|
||||
active_streaming_vars = [
|
||||
var
|
||||
for var in streaming_vars
|
||||
|
@ -717,12 +717,12 @@ class ConfigManager:
|
|||
dataset_id: {
|
||||
"name": info["name"],
|
||||
"prefix": info["prefix"],
|
||||
"variables_count": len(info["variables"]),
|
||||
"variables_count": len(self.get_dataset_variables(dataset_id)),
|
||||
"streaming_count": len(
|
||||
[
|
||||
var
|
||||
for var in info.get("streaming_variables", [])
|
||||
if info.get("variables", {})
|
||||
if self.get_dataset_variables(dataset_id)
|
||||
.get(var, {})
|
||||
.get("streaming", False)
|
||||
]
|
||||
|
|
|
@ -366,8 +366,8 @@ class PLCDataStreamer:
|
|||
"datasets_count": len(self.config_manager.datasets),
|
||||
"active_datasets_count": len(self.config_manager.active_datasets),
|
||||
"total_variables": sum(
|
||||
len(dataset["variables"])
|
||||
for dataset in self.config_manager.datasets.values()
|
||||
len(self.config_manager.get_dataset_variables(dataset_id))
|
||||
for dataset_id in self.config_manager.datasets.keys()
|
||||
),
|
||||
"streaming_variables_count": sum(
|
||||
len(dataset.get("streaming_variables", []))
|
||||
|
@ -586,7 +586,7 @@ class PLCDataStreamer:
|
|||
def current_dataset_id(self, value):
|
||||
"""Set current dataset ID (backward compatibility)"""
|
||||
self.config_manager.current_dataset_id = value
|
||||
self.config_manager.save_datasets()
|
||||
# Note: Dataset changes now saved via frontend RJSF
|
||||
|
||||
@property
|
||||
def connected(self):
|
||||
|
@ -598,6 +598,6 @@ class PLCDataStreamer:
|
|||
"""Get streaming status (backward compatibility)"""
|
||||
return self.data_streamer.is_streaming()
|
||||
|
||||
def save_datasets(self):
|
||||
"""Save datasets (backward compatibility)"""
|
||||
self.config_manager.save_datasets()
|
||||
# DEPRECATED: save_datasets() method removed
|
||||
# Data is now saved directly from frontend via RJSF and API endpoints
|
||||
# Use load_datasets() to reload configuration when needed
|
||||
|
|
|
@ -294,8 +294,7 @@ class PlotManager:
|
|||
|
||||
self.sessions[session_id] = session
|
||||
|
||||
# Guardar automáticamente la configuración
|
||||
self.save_plots()
|
||||
# Note: Plot session configuration now saved via frontend RJSF
|
||||
|
||||
if self.logger:
|
||||
self.logger.info(
|
||||
|
@ -338,8 +337,7 @@ class PlotManager:
|
|||
|
||||
del self.sessions[session_id]
|
||||
|
||||
# Guardar automáticamente después de eliminar
|
||||
self.save_plots()
|
||||
# Note: Plot session removal now saved via frontend RJSF
|
||||
|
||||
return True
|
||||
return False
|
||||
|
@ -446,26 +444,57 @@ class PlotManager:
|
|||
self.session_counter = 0
|
||||
|
||||
def _load_plots_separated(self):
|
||||
"""Load plots from separated definition and variable files"""
|
||||
"""Load plots from separated definition and variable files (new array format)"""
|
||||
try:
|
||||
# Load definitions
|
||||
# Load definitions (new array format: {"plots": [array]})
|
||||
with open(self.plot_definitions_file, "r", encoding="utf-8") as f:
|
||||
definitions_data = json.load(f)
|
||||
|
||||
# Load variables
|
||||
# Load variables (new array format: {"variables": [array]})
|
||||
with open(self.plot_variables_file, "r", encoding="utf-8") as f:
|
||||
variables_data = json.load(f)
|
||||
|
||||
# Merge data back for session creation
|
||||
plots_data = definitions_data.get("plots", {})
|
||||
plot_vars = variables_data.get("plot_variables", {})
|
||||
# Convert new array format to internal dictionary format for compatibility
|
||||
plots_array = definitions_data.get("plots", [])
|
||||
plot_vars_array = variables_data.get("variables", [])
|
||||
|
||||
for session_id, plot_config in plots_data.items():
|
||||
# Add variables to config
|
||||
variables_info = plot_vars.get(session_id, {})
|
||||
# Build plot variables lookup by plot_id
|
||||
plot_variables_lookup = {}
|
||||
for plot_vars_entry in plot_vars_array:
|
||||
plot_id = plot_vars_entry.get("plot_id")
|
||||
if plot_id:
|
||||
# Convert variables array to format expected by PlotSession
|
||||
variables_list = plot_vars_entry.get("variables", [])
|
||||
# Convert to object format with variable names and properties
|
||||
variables_config = {}
|
||||
for var in variables_list:
|
||||
var_name = var.get("variable_name")
|
||||
if var_name:
|
||||
variables_config[var_name] = {
|
||||
"variable_name": var_name,
|
||||
"color": var.get("color", "#3498db"),
|
||||
"enabled": var.get("enabled", True),
|
||||
}
|
||||
plot_variables_lookup[plot_id] = variables_config
|
||||
|
||||
# Process plot definitions
|
||||
for plot_def in plots_array:
|
||||
session_id = plot_def.get("id") or plot_def.get("session_id")
|
||||
if not session_id:
|
||||
if self.logger:
|
||||
self.logger.warning("Skipping plot definition without id")
|
||||
continue
|
||||
|
||||
# Build full config with variables
|
||||
full_config = {
|
||||
**plot_config,
|
||||
"variables": variables_info.get("variables", []),
|
||||
"name": plot_def.get("name", f"Plot {session_id}"),
|
||||
"time_window": plot_def.get("time_window", 60),
|
||||
"y_min": plot_def.get("y_min"),
|
||||
"y_max": plot_def.get("y_max"),
|
||||
"trigger_variable": plot_def.get("trigger_variable"),
|
||||
"trigger_enabled": plot_def.get("trigger_enabled", False),
|
||||
"trigger_on_true": plot_def.get("trigger_on_true", True),
|
||||
"variables": plot_variables_lookup.get(session_id, {}),
|
||||
}
|
||||
|
||||
# Create session with full configuration
|
||||
|
@ -478,16 +507,16 @@ class PlotManager:
|
|||
|
||||
# Update counter to avoid duplicate IDs
|
||||
try:
|
||||
session_num = int(session_id.split("_")[1])
|
||||
if session_num >= self.session_counter:
|
||||
self.session_counter = session_num + 1
|
||||
if session_id.startswith("plot_"):
|
||||
session_num = int(session_id.split("_")[1])
|
||||
if session_num >= self.session_counter:
|
||||
self.session_counter = session_num + 1
|
||||
except (IndexError, ValueError):
|
||||
pass
|
||||
|
||||
# Load counter from definitions
|
||||
saved_counter = definitions_data.get("session_counter", len(self.sessions))
|
||||
if saved_counter > self.session_counter:
|
||||
self.session_counter = saved_counter
|
||||
# Ensure session counter is at least the number of sessions
|
||||
if len(self.sessions) >= self.session_counter:
|
||||
self.session_counter = len(self.sessions)
|
||||
|
||||
if self.logger and self.sessions:
|
||||
self.logger.info(
|
||||
|
@ -527,8 +556,7 @@ class PlotManager:
|
|||
if saved_counter > self.session_counter:
|
||||
self.session_counter = saved_counter
|
||||
|
||||
# Save to new separated format
|
||||
self.save_plots()
|
||||
# Note: Migration complete - data now managed by frontend via RJSF
|
||||
|
||||
if self.logger:
|
||||
self.logger.info(
|
||||
|
@ -540,53 +568,9 @@ class PlotManager:
|
|||
self.logger.error(f"Error migrating legacy plots: {e}")
|
||||
raise
|
||||
|
||||
def save_plots(self):
|
||||
"""Guardar plots a archivos separados de persistencia"""
|
||||
try:
|
||||
# Prepare definitions data - only plots, no static fields
|
||||
definitions_data = {
|
||||
"plots": {},
|
||||
}
|
||||
|
||||
# Prepare variables data - only variables, no static fields
|
||||
variables_data = {
|
||||
"plot_variables": {},
|
||||
}
|
||||
|
||||
# Split sessions into definitions and variables
|
||||
for session_id, session in self.sessions.items():
|
||||
# Extract definition (metadata without variables)
|
||||
definitions_data["plots"][session_id] = {
|
||||
"name": session.name,
|
||||
"time_window": session.time_window,
|
||||
"y_min": session.y_min,
|
||||
"y_max": session.y_max,
|
||||
"trigger_variable": session.trigger_variable,
|
||||
"trigger_enabled": session.trigger_enabled,
|
||||
"trigger_on_true": session.trigger_on_true,
|
||||
"session_id": session_id,
|
||||
}
|
||||
|
||||
# Extract variables
|
||||
variables_data["plot_variables"][session_id] = {
|
||||
"variables": session.variables,
|
||||
}
|
||||
|
||||
# Save both files
|
||||
with open(self.plot_definitions_file, "w", encoding="utf-8") as f:
|
||||
json.dump(definitions_data, f, indent=2, ensure_ascii=False)
|
||||
|
||||
with open(self.plot_variables_file, "w", encoding="utf-8") as f:
|
||||
json.dump(variables_data, f, indent=2, ensure_ascii=False)
|
||||
|
||||
if self.logger:
|
||||
self.logger.debug(
|
||||
f"Saved {len(self.sessions)} plot sessions to separated files"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
if self.logger:
|
||||
self.logger.error(f"Error saving plot sessions: {e}")
|
||||
# DEPRECATED: save_plots() method removed
|
||||
# Data is now saved directly from frontend via RJSF and API endpoints
|
||||
# Use _load_plots_separated() to reload configuration when needed
|
||||
|
||||
def update_session_config(self, session_id: str, config: Dict[str, Any]) -> bool:
|
||||
"""Actualizar configuración de una sesión existente"""
|
||||
|
@ -632,8 +616,7 @@ class PlotManager:
|
|||
old_data = list(session.data[var])
|
||||
session.data[var] = deque(old_data, maxlen=max_points)
|
||||
|
||||
# Guardar cambios
|
||||
self.save_plots()
|
||||
# Note: Plot session configuration changes now saved via frontend RJSF
|
||||
|
||||
if self.logger:
|
||||
self.logger.info(f"Updated plot session '{session.name}' configuration")
|
||||
|
|
|
@ -315,7 +315,7 @@ class ConfigSchemaManager:
|
|||
if current and current in datasets:
|
||||
self.config_manager.current_dataset_id = current
|
||||
|
||||
self.config_manager.save_datasets()
|
||||
# Note: Data is now persisted directly via frontend RJSF
|
||||
return {"success": True}
|
||||
|
||||
if config_id == "plot-definitions":
|
||||
|
@ -342,8 +342,7 @@ class ConfigSchemaManager:
|
|||
else:
|
||||
self.plot_manager.session_counter = 0
|
||||
|
||||
# Save to separated files
|
||||
self.plot_manager.save_plots()
|
||||
# Note: Data is now persisted directly via frontend RJSF
|
||||
|
||||
except Exception as e:
|
||||
if self.logger:
|
||||
|
@ -376,9 +375,11 @@ class ConfigSchemaManager:
|
|||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(self.read_config("plc"), f, indent=2)
|
||||
elif config_id == "datasets":
|
||||
self.config_manager.save_datasets() # Now saves to separated files
|
||||
# Note: Datasets now managed via separated files by frontend RJSF
|
||||
pass
|
||||
elif config_id == "plots":
|
||||
self.plot_manager.save_plots() # Now saves to separated files
|
||||
# Note: Plots now managed via separated files by frontend RJSF
|
||||
pass
|
||||
elif config_id in [
|
||||
"dataset-definitions",
|
||||
"dataset-variables",
|
||||
|
|
|
@ -683,7 +683,9 @@ class DataStreamer:
|
|||
f"Dataset activated: {dataset_info['name']}",
|
||||
{
|
||||
"dataset_id": dataset_id,
|
||||
"variables_count": len(dataset_info["variables"]),
|
||||
"variables_count": len(
|
||||
self.config_manager.get_dataset_variables(dataset_id)
|
||||
),
|
||||
"streaming_count": len(dataset_info["streaming_variables"]),
|
||||
"prefix": dataset_info["prefix"],
|
||||
},
|
||||
|
|
|
@ -15,13 +15,14 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
isRealTimeMode: true,
|
||||
refreshRate: 1000,
|
||||
userOverrideUntil: 0,
|
||||
userPaused: false
|
||||
userPaused: false,
|
||||
sessionId: null
|
||||
});
|
||||
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState(null);
|
||||
const [dataPointsCount, setDataPointsCount] = useState(0);
|
||||
const [resolvedConfig, setResolvedConfig] = useState(null);
|
||||
const resolvedConfigRef = useRef(null);
|
||||
|
||||
const bgColor = useColorModeValue('white', 'gray.800');
|
||||
const textColor = useColorModeValue('gray.600', 'gray.300');
|
||||
|
@ -55,6 +56,17 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
if (!variables) return [];
|
||||
|
||||
if (Array.isArray(variables)) {
|
||||
// Handle array of objects with variable_name property
|
||||
if (variables.length > 0 && typeof variables[0] === 'object' && variables[0].variable_name) {
|
||||
return variables
|
||||
.filter(varConfig => varConfig.enabled !== false && varConfig.variable_name)
|
||||
.map((varConfig, index) => ({
|
||||
name: varConfig.variable_name,
|
||||
color: varConfig.color || getColor(varConfig.variable_name, index),
|
||||
enabled: varConfig.enabled !== false
|
||||
}));
|
||||
}
|
||||
// Handle simple array of strings
|
||||
return variables.map((variable, index) => ({
|
||||
name: variable,
|
||||
color: getColor(variable, index),
|
||||
|
@ -84,9 +96,11 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
}, [getColor]);
|
||||
|
||||
const createStreamingChart = useCallback(async () => {
|
||||
const cfg = resolvedConfig || session?.config;
|
||||
const cfg = resolvedConfigRef.current || session?.config;
|
||||
if (!canvasRef.current || !cfg) return;
|
||||
|
||||
console.log(`🔧 Creating chart for session ${session?.session_id}...`);
|
||||
|
||||
try {
|
||||
// Ensure Chart.js and plugins are loaded
|
||||
if (typeof window.Chart === 'undefined') {
|
||||
|
@ -246,10 +260,10 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
sessionDataRef.current.isRealTimeMode = true;
|
||||
sessionDataRef.current.noDataCycles = 0;
|
||||
// Sync ingest pause state with initial chart pause
|
||||
const initialPaused = !session.is_active || session.is_paused;
|
||||
const initialPaused = !session?.is_active || session?.is_paused;
|
||||
sessionDataRef.current.ingestPaused = initialPaused;
|
||||
sessionDataRef.current.isPaused = initialPaused;
|
||||
console.log(`✅ Plot ${session.session_id}: Real-time Streaming enabled`);
|
||||
console.log(`✅ Plot ${session?.session_id}: Real-time Streaming enabled`);
|
||||
|
||||
setIsLoading(false);
|
||||
setError(null);
|
||||
|
@ -259,10 +273,11 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
setError(error.message);
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [session, resolvedConfig, getEnabledVariables, getColor]);
|
||||
}, []);
|
||||
|
||||
const onStreamingRefresh = useCallback(async (chart) => {
|
||||
if (!session?.session_id) return;
|
||||
const sessionId = sessionDataRef.current.sessionId;
|
||||
if (!sessionId) return;
|
||||
|
||||
try {
|
||||
const now = Date.now();
|
||||
|
@ -276,14 +291,18 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
sessionDataRef.current.lastDataFetch = now;
|
||||
|
||||
// Fetch data from backend
|
||||
const response = await fetch(`/api/plots/${session.session_id}/data`);
|
||||
const response = await fetch(`/api/plots/${sessionId}/data`);
|
||||
if (!response.ok) return;
|
||||
|
||||
const plotData = await response.json();
|
||||
|
||||
|
||||
// Add new data to chart
|
||||
const pointsAdded = addNewDataToStreaming(plotData, now);
|
||||
updatePointsCounter(plotData);
|
||||
|
||||
if (pointsAdded > 0) {
|
||||
console.log(`📊 Plot ${sessionId}: Added ${pointsAdded} points to chart`);
|
||||
}
|
||||
|
||||
// Auto-pause when no data arrives for several cycles; resume when data appears
|
||||
if (pointsAdded > 0) {
|
||||
|
@ -308,9 +327,9 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error(`📈 Error in streaming refresh for ${session.session_id}:`, error);
|
||||
console.error(`📈 Error in streaming refresh for ${sessionDataRef.current.sessionId}:`, error);
|
||||
}
|
||||
}, [session?.session_id]);
|
||||
}, []);
|
||||
|
||||
const addNewDataToStreaming = useCallback((plotData, timestamp) => {
|
||||
if (!chartRef.current || !plotData) return 0;
|
||||
|
@ -485,6 +504,9 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
|
||||
// Also expose control functions through props for easier access
|
||||
React.useEffect(() => {
|
||||
// Update sessionId ref when session changes
|
||||
sessionDataRef.current.sessionId = session?.session_id || null;
|
||||
|
||||
if (typeof session?.onChartReady === 'function') {
|
||||
session.onChartReady({
|
||||
pauseStreaming,
|
||||
|
@ -492,7 +514,7 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
clearChart
|
||||
});
|
||||
}
|
||||
}, [pauseStreaming, resumeStreaming, clearChart, session]);
|
||||
}, [pauseStreaming, resumeStreaming, clearChart, session?.session_id, session?.onChartReady]);
|
||||
|
||||
// Update chart when session status changes
|
||||
useEffect(() => {
|
||||
|
@ -510,53 +532,17 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
}
|
||||
}, [session?.is_active, session?.is_paused, pauseStreaming, resumeStreaming]);
|
||||
|
||||
// Resolve config: use provided session.config or fetch from backend
|
||||
// Initialize chart when config is resolved - simplified approach
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
async function resolveConfig() {
|
||||
try {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
// If config already present in session, use it
|
||||
if (session?.config) {
|
||||
if (!cancelled) {
|
||||
setResolvedConfig(session.config);
|
||||
setIsLoading(false);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Otherwise fetch it from backend
|
||||
if (session?.session_id) {
|
||||
const resp = await fetch(`/api/plots/${session.session_id}/config`);
|
||||
if (!resp.ok) {
|
||||
const txt = await resp.text().catch(() => resp.statusText);
|
||||
throw new Error(`HTTP ${resp.status}: ${txt || resp.statusText}`);
|
||||
}
|
||||
const data = await resp.json();
|
||||
if (data?.success && data?.config) {
|
||||
if (!cancelled) setResolvedConfig(data.config);
|
||||
} else {
|
||||
throw new Error('Plot config not available');
|
||||
}
|
||||
} else {
|
||||
throw new Error('Invalid session (missing session_id)');
|
||||
}
|
||||
} catch (e) {
|
||||
if (!cancelled) setError(e.message || 'Error loading plot config');
|
||||
} finally {
|
||||
if (!cancelled) setIsLoading(false);
|
||||
// Only create chart once when we have a session_id and canvas
|
||||
if (session?.session_id && canvasRef.current && !chartRef.current) {
|
||||
const config = session?.config;
|
||||
if (config) {
|
||||
resolvedConfigRef.current = config;
|
||||
createStreamingChart();
|
||||
}
|
||||
}
|
||||
|
||||
resolveConfig();
|
||||
return () => { cancelled = true; };
|
||||
}, [session?.session_id, session?.config]);
|
||||
|
||||
// Initialize chart when config is resolved
|
||||
useEffect(() => {
|
||||
if (resolvedConfig) {
|
||||
createStreamingChart();
|
||||
}
|
||||
|
||||
return () => {
|
||||
try {
|
||||
if (chartRef.current) {
|
||||
|
@ -573,7 +559,7 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
|
|||
clearInterval(sessionDataRef.current.manualInterval);
|
||||
}
|
||||
};
|
||||
}, [resolvedConfig, createStreamingChart]);
|
||||
}, [session?.session_id]);
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import React, { useEffect, useRef, useState, useCallback } from 'react'
|
||||
import React, { useEffect, useRef, useState, useCallback, useMemo } from 'react'
|
||||
import {
|
||||
Box,
|
||||
VStack,
|
||||
|
@ -67,9 +67,13 @@ export default function PlotRealtimeSession({
|
|||
const borderColor = useColorModeValue('gray.200', 'gray.600')
|
||||
const muted = useColorModeValue('gray.600', 'gray.300')
|
||||
|
||||
// Enhanced session object for ChartjsPlot
|
||||
const enhancedSession = {
|
||||
...session,
|
||||
// Enhanced session object for ChartjsPlot - memoized to prevent recreations
|
||||
const enhancedSession = useMemo(() => ({
|
||||
session_id: plotDefinition.id,
|
||||
name: plotDefinition.name,
|
||||
is_active: session.is_active,
|
||||
is_paused: session.is_paused,
|
||||
variables_count: plotVariables.length,
|
||||
config: {
|
||||
...plotDefinition,
|
||||
...localConfig,
|
||||
|
@ -78,7 +82,15 @@ export default function PlotRealtimeSession({
|
|||
onChartReady: (controls) => {
|
||||
chartControlsRef.current = controls
|
||||
}
|
||||
}
|
||||
}), [
|
||||
plotDefinition.id,
|
||||
plotDefinition.name,
|
||||
plotDefinition,
|
||||
session.is_active,
|
||||
session.is_paused,
|
||||
plotVariables,
|
||||
localConfig
|
||||
])
|
||||
|
||||
// Load session status from backend (optional - session may not exist until started)
|
||||
const refreshSessionStatus = useCallback(async () => {
|
||||
|
@ -132,29 +144,7 @@ export default function PlotRealtimeSession({
|
|||
|
||||
// Control plot session (start, pause, stop, clear)
|
||||
const handleControlClick = async (action) => {
|
||||
// Apply immediate local feedback
|
||||
if (chartControlsRef.current) {
|
||||
switch (action) {
|
||||
case 'pause':
|
||||
chartControlsRef.current.pauseStreaming()
|
||||
setSession(prev => ({ ...prev, is_paused: true }))
|
||||
break
|
||||
case 'start':
|
||||
case 'resume':
|
||||
chartControlsRef.current.resumeStreaming()
|
||||
setSession(prev => ({ ...prev, is_active: true, is_paused: false }))
|
||||
break
|
||||
case 'clear':
|
||||
chartControlsRef.current.clearChart()
|
||||
break
|
||||
case 'stop':
|
||||
chartControlsRef.current.pauseStreaming()
|
||||
setSession(prev => ({ ...prev, is_active: false, is_paused: false }))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Send command to backend
|
||||
// Send command to backend first
|
||||
try {
|
||||
// For 'start' action, create the plot session first if it doesn't exist
|
||||
if (action === 'start') {
|
||||
|
@ -177,9 +167,45 @@ export default function PlotRealtimeSession({
|
|||
}
|
||||
}
|
||||
|
||||
// Send control command to backend
|
||||
await api.controlPlotSession(plotDefinition.id, action)
|
||||
// Refresh status after backend command
|
||||
setTimeout(refreshSessionStatus, 500)
|
||||
|
||||
// For 'start' action, verify that the session is actually active
|
||||
if (action === 'start') {
|
||||
// Wait a bit and verify the session started
|
||||
await new Promise(resolve => setTimeout(resolve, 300))
|
||||
const verifyResponse = await api.getPlotSession(plotDefinition.id)
|
||||
if (!verifyResponse?.config?.is_active) {
|
||||
// Try the control command once more if not active
|
||||
console.log('Session not active, retrying control command...')
|
||||
await api.controlPlotSession(plotDefinition.id, action)
|
||||
}
|
||||
}
|
||||
|
||||
// Apply local feedback after successful backend response
|
||||
if (chartControlsRef.current) {
|
||||
switch (action) {
|
||||
case 'pause':
|
||||
chartControlsRef.current.pauseStreaming()
|
||||
setSession(prev => ({ ...prev, is_paused: true }))
|
||||
break
|
||||
case 'start':
|
||||
case 'resume':
|
||||
chartControlsRef.current.resumeStreaming()
|
||||
setSession(prev => ({ ...prev, is_active: true, is_paused: false }))
|
||||
break
|
||||
case 'clear':
|
||||
chartControlsRef.current.clearChart()
|
||||
break
|
||||
case 'stop':
|
||||
chartControlsRef.current.pauseStreaming()
|
||||
setSession(prev => ({ ...prev, is_active: false, is_paused: false }))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Refresh status after backend command (shorter delay)
|
||||
setTimeout(refreshSessionStatus, 200)
|
||||
} catch (error) {
|
||||
toast({
|
||||
title: `❌ Failed to ${action} plot`,
|
||||
|
|
2
main.py
2
main.py
|
@ -1270,7 +1270,7 @@ def set_current_dataset():
|
|||
|
||||
if dataset_id and dataset_id in streamer.datasets:
|
||||
streamer.current_dataset_id = dataset_id
|
||||
streamer.save_datasets()
|
||||
# Note: No need to save - this is just changing current selection in memory
|
||||
return jsonify(
|
||||
{
|
||||
"success": True,
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
{
|
||||
"last_state": {
|
||||
"should_connect": false,
|
||||
"should_connect": true,
|
||||
"should_stream": false,
|
||||
"active_datasets": []
|
||||
"active_datasets": [
|
||||
"Test",
|
||||
"DAR",
|
||||
"Fast"
|
||||
]
|
||||
},
|
||||
"auto_recovery_enabled": true,
|
||||
"last_update": "2025-08-13T14:54:09.753196"
|
||||
"last_update": "2025-08-14T11:14:46.738038"
|
||||
}
|
Loading…
Reference in New Issue