feat: Enhance DataStreamer with critical validation and performance monitoring improvements

- Refactored performance monitoring initialization for better readability.
- Added critical validation ensuring buffered rows match the dataset's current variables, preventing CSV corruption (see the sketch after this list).
- Added logging for detected data inconsistencies, and cleared stale buffers when dataset variables are modified.
- Improved error handling and logging during CSV flushing and dataset reading.
- Enhanced UDP streaming error handling and logging.
- Updated threading management for CSV flushing and dataset streaming.
- Added comprehensive performance metrics recording for dataset reads and writes.
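The validation described above reduces to a set comparison between the dataset's current variable names and the keys of each buffered row; mismatched rows are skipped at flush time, and the buffer is cleared when a dataset's variables change. A minimal standalone sketch of the check (variable names here are illustrative):

```python
from datetime import datetime

def entry_matches_schema(entry: dict, dataset_variables: dict) -> bool:
    """True only when the buffered row carries exactly the variable set the
    dataset currently defines; anything else would misalign the CSV columns."""
    return set(entry["data"]) == set(dataset_variables)

# A row buffered before "temp" was added no longer matches and is skipped:
dataset_variables = {"speed": {}, "torque": {}, "temp": {}}
stale = {"timestamp": datetime.now(), "data": {"speed": 1.0, "torque": 2.0}}
assert not entry_matches_schema(stale, dataset_variables)
```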

feat: Localize historical plot management components

- Integrated i18n support into the PlotHistoricalManager, PlotHistoricalSession, and TimePointSelector components.
- Added English, Spanish, and Italian locale entries for UI strings covering historical plots, time selection, and console logs.
- Improved the user experience by providing localized messages for actions and statuses.

fix: Update system state configuration

- Set `should_connect` to true and added the active datasets (`Fast`, `DAR`) to the system state.
- Updated the last-update timestamp and set the PlotJuggler executable path.
Author: Miguel, 2025-08-17 13:01:36 +02:00
Parent: ac87ce2568
Commit: 00c021f496
10 changed files with 2704 additions and 2616 deletions

File diff suppressed because it is too large.


@@ -58,7 +58,9 @@ class DataStreamer:
self.recording_protector = RecordingProtector(logger)
# 📊 PERFORMANCE MONITORING - Real-time performance tracking
self.performance_monitor = PerformanceMonitor(logger, event_logger, report_interval=10.0)
self.performance_monitor = PerformanceMonitor(
logger, event_logger, report_interval=10.0
)
# UDP streaming setup
self.udp_socket = None
@@ -380,16 +382,15 @@ class DataStreamer:
return
timestamp = datetime.now()
with self.csv_buffer_lock:
if dataset_id not in self.csv_buffer:
self.csv_buffer[dataset_id] = []
# Add data to buffer with timestamp
self.csv_buffer[dataset_id].append({
'timestamp': timestamp,
'data': data.copy()
})
self.csv_buffer[dataset_id].append(
{"timestamp": timestamp, "data": data.copy()}
)
def _csv_flush_loop(self):
"""🚨 Background thread to flush CSV buffers every flush_interval seconds"""
@@ -406,22 +407,46 @@ class DataStreamer:
flush_start_time = time.time()
total_points_written = 0
datasets_flushed = 0
with self.csv_buffer_lock:
for dataset_id, buffer_data in self.csv_buffer.items():
if not buffer_data:
continue
try:
self.setup_dataset_csv_file(dataset_id)
if dataset_id in self.dataset_csv_writers:
dataset_variables = self.config_manager.get_dataset_variables(dataset_id)
dataset_variables = self.config_manager.get_dataset_variables(
dataset_id
)
for entry in buffer_data:
timestamp_str = entry['timestamp'].strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
data = entry['data']
timestamp_str = entry["timestamp"].strftime(
"%Y-%m-%d %H:%M:%S.%f"
)[:-3]
data = entry["data"]
# 🚨 CRITICAL VALIDATION: Check if buffered data matches current dataset variables
current_vars = set(dataset_variables.keys())
buffered_vars = set(data.keys())
if current_vars != buffered_vars:
# Data inconsistency detected - skip this entry to prevent CSV corruption
missing_vars = current_vars - buffered_vars
extra_vars = buffered_vars - current_vars
if self.logger:
dataset_name = self.config_manager.datasets.get(
dataset_id, {}
).get("name", dataset_id)
self.logger.warning(
f"🚨 SKIPPING inconsistent buffered data for dataset '{dataset_name}': "
f"Missing vars: {missing_vars}, Extra vars: {extra_vars}. "
f"This prevents CSV corruption during variable modifications."
)
continue # Skip this entry
# Create row with all variables for this dataset
row = [timestamp_str]
for var_name in dataset_variables.keys():
@@ -430,26 +455,30 @@ class DataStreamer:
if isinstance(value, bool):
value = 1 if value else 0
row.append(value)
self.dataset_csv_writers[dataset_id].writerow(row)
total_points_written += 1
# Flush file to disk
self.dataset_csv_files[dataset_id].flush()
datasets_flushed += 1
except Exception as e:
if self.logger:
self.logger.error(f"Error flushing CSV buffer for dataset {dataset_id}: {e}")
self.logger.error(
f"Error flushing CSV buffer for dataset {dataset_id}: {e}"
)
# Clear all buffers after successful flush
self.csv_buffer.clear()
flush_time = time.time() - flush_start_time
if total_points_written > 0 and self.logger:
# 🚨 FIX: Avoid division by zero when flush_time is 0
throughput = total_points_written / max(flush_time, 0.001) # Minimum 1ms to avoid division by zero
throughput = total_points_written / max(
flush_time, 0.001
) # Minimum 1ms to avoid division by zero
self.logger.debug(
f"📝 CSV Batch Flush: {total_points_written} points written across {datasets_flushed} datasets "
f"in {flush_time:.3f}s ({throughput:.1f} points/sec)"
@@ -459,31 +488,31 @@ class DataStreamer:
"""🚨 Start the asynchronous CSV flush thread"""
if self.csv_flush_active:
return
self.csv_flush_active = True
self.csv_flush_thread = threading.Thread(
target=self._csv_flush_loop,
name="csv_flush_thread",
daemon=True
target=self._csv_flush_loop, name="csv_flush_thread", daemon=True
)
self.csv_flush_thread.start()
if self.logger:
self.logger.info(f"📝 CSV async flush thread started (interval: {self.csv_flush_interval}s)")
self.logger.info(
f"📝 CSV async flush thread started (interval: {self.csv_flush_interval}s)"
)
def stop_csv_flush_thread(self):
"""🚨 Stop the asynchronous CSV flush thread and flush remaining data"""
if not self.csv_flush_active:
return
self.csv_flush_active = False
# Final flush of any remaining data
self._flush_csv_buffers()
if self.csv_flush_thread and self.csv_flush_thread.is_alive():
self.csv_flush_thread.join(timeout=5.0)
if self.logger:
self.logger.info("📝 CSV async flush thread stopped, final flush completed")
@@ -493,6 +522,17 @@ class DataStreamer:
return
try:
# 🚨 CRITICAL FIX: Clear CSV buffer for this dataset to prevent writing old data with new headers
with self.csv_buffer_lock:
if dataset_id in self.csv_buffer:
old_buffer_size = len(self.csv_buffer[dataset_id])
self.csv_buffer[dataset_id].clear()
if self.logger and old_buffer_size > 0:
self.logger.info(
f"🚨 CRITICAL: Cleared {old_buffer_size} buffered entries for dataset '{self.config_manager.datasets[dataset_id]['name']}' "
f"to prevent data corruption during variable modification"
)
# Close current file if open
if dataset_id in self.dataset_csv_files:
self.dataset_csv_files[dataset_id].close()
@@ -543,6 +583,7 @@ class DataStreamer:
"file_path": csv_path,
"variables_count": len(dataset_variables),
"reason": "variable_modification",
"buffer_cleared": True,
},
)
@@ -576,7 +617,7 @@ class DataStreamer:
Called by: dataset_streaming_loop() at configured intervals
Updates: self.last_read_values cache for use by all other functions
🚨 CRITICAL FIX: Returns None if ANY variable fails - prevents corrupt CSV data
🚀 PERFORMANCE FIX: Uses batch reading to reduce snap7 calls and improve timing
"""
@@ -588,11 +629,11 @@ class DataStreamer:
try:
# 🚀 NEW: Use batch reading for improved performance
batch_results = self.plc_client.read_variables_batch(variables)
for var_name, value in batch_results.items():
if value is not None:
data[var_name] = value
# Clear any previous error for this variable
if (
dataset_id in self.last_read_errors
@@ -603,7 +644,7 @@ class DataStreamer:
# Variable read failed
var_config = variables.get(var_name, {})
error_msg = f"Batch read failed for variable {var_name}"
if self.logger:
self.logger.warning(
f"Error reading variable {var_name} in dataset {dataset_id}: {error_msg}"
@@ -611,12 +652,14 @@ class DataStreamer:
data[var_name] = None
errors[var_name] = error_msg
failed_variables.append(var_name)
except Exception as e:
# Fall back to individual reads if batch reading fails completely
if self.logger:
self.logger.warning(f"Batch reading failed for dataset {dataset_id}, falling back to individual reads: {e}")
self.logger.warning(
f"Batch reading failed for dataset {dataset_id}, falling back to individual reads: {e}"
)
for var_name, var_config in variables.items():
try:
value = self.plc_client.read_variable(var_config)
@@ -642,7 +685,9 @@ class DataStreamer:
# This ensures that incomplete data is never written to CSV
if failed_variables:
if self.logger:
dataset_name = self.config_manager.datasets.get(dataset_id, {}).get('name', dataset_id)
dataset_name = self.config_manager.datasets.get(dataset_id, {}).get(
"name", dataset_id
)
self.logger.warning(
f"🚨 CRITICAL: Dataset '{dataset_name}' read failed - {len(failed_variables)} variables failed: {failed_variables}. "
f"Skipping CSV write to prevent data corruption."
@@ -769,7 +814,7 @@ class DataStreamer:
"""🔑 HIGH PRIORITY: Streaming loop for CSV recording - CRITICAL PRIORITY THREAD with performance monitoring"""
dataset_info = self.config_manager.datasets[dataset_id]
interval = self.config_manager.get_dataset_sampling_interval(dataset_id)
# 📊 Register dataset with performance monitor
self.performance_monitor.set_dataset_interval(dataset_id, interval)
@@ -790,19 +835,23 @@ class DataStreamer:
read_time = 0.0
csv_write_time = 0.0
variables_count = 0
try:
# 📋 CRITICAL SECTION: PLC READ with timing and error tracking
dataset_variables = self.config_manager.get_dataset_variables(dataset_id)
dataset_variables = self.config_manager.get_dataset_variables(
dataset_id
)
variables_count = len(dataset_variables)
# Measure read operation time
read_start = time.time()
# Ensure entire dataset read is atomic w.r.t. other datasets
with self.plc_client.io_lock:
all_data = self.read_dataset_variables(dataset_id, dataset_variables)
all_data = self.read_dataset_variables(
dataset_id, dataset_variables
)
read_time = time.time() - read_start
# 🚨 CRITICAL FIX: Proper validation - all_data is None if ANY variable failed
read_success = all_data is not None
@@ -817,24 +866,33 @@ class DataStreamer:
# 🚨 NEW: Use async buffering instead of immediate write
self.buffer_dataset_csv_data(dataset_id, all_data)
csv_write_time = time.time() - csv_start
# 📊 Record successful CSV buffer operation (much faster)
self.performance_monitor.record_csv_write(dataset_id, csv_write_time, success=True)
self.performance_monitor.record_csv_write(
dataset_id, csv_write_time, success=True
)
except Exception as csv_error:
csv_write_time = time.time() - csv_start
# 📊 Record CSV buffer error
self.performance_monitor.record_csv_write(dataset_id, csv_write_time, success=False)
self.performance_monitor.record_csv_write(
dataset_id, csv_write_time, success=False
)
if self.logger:
self.logger.error(f"🚨 CSV BUFFER ERROR for dataset '{dataset_info['name']}': {csv_error}")
self.logger.error(
f"🚨 CSV BUFFER ERROR for dataset '{dataset_info['name']}': {csv_error}"
)
# 📡 UDP Streaming: Lower priority - only if enabled
if self.udp_streaming_enabled:
# Use background thread for UDP to not block recording
self.priority_manager.submit_background_task(
self._handle_udp_streaming, dataset_id, dataset_info, all_data
self._handle_udp_streaming,
dataset_id,
dataset_info,
all_data,
)
# 📈 PLOT MANAGER: Background priority - update plots without blocking
@@ -851,7 +909,7 @@ class DataStreamer:
f"🚨 CRITICAL: Dataset '{dataset_info['name']}' read completely failed. "
f"No CSV data written to prevent corruption (consecutive errors: {consecutive_errors})"
)
if consecutive_errors >= max_consecutive_errors:
self.event_logger.log_event(
"error",
@@ -860,7 +918,7 @@ class DataStreamer:
{
"dataset_id": dataset_id,
"consecutive_errors": consecutive_errors,
"priority": "CRITICAL"
"priority": "CRITICAL",
},
)
break
@@ -869,17 +927,17 @@ class DataStreamer:
loop_end_time = time.time()
total_loop_time = loop_end_time - loop_start_time
expected_end_time = loop_start_time + interval
# Calculate delay (how much we're behind schedule)
delay = max(0.0, loop_end_time - expected_end_time)
# 📊 Record performance metrics
self.performance_monitor.record_dataset_read(
dataset_id=dataset_id,
read_time=read_time,
variables_count=variables_count,
success=read_success,
delay=delay
delay=delay,
)
# Maintain sampling interval
@@ -895,16 +953,16 @@ class DataStreamer:
except Exception as e:
consecutive_errors += 1
# 📊 Record read error
self.performance_monitor.record_dataset_read(
dataset_id=dataset_id,
read_time=read_time,
variables_count=variables_count,
success=False,
delay=0.0
delay=0.0,
)
self.event_logger.log_event(
"error",
"dataset_loop_error",
@@ -915,7 +973,7 @@ class DataStreamer:
"consecutive_errors": consecutive_errors,
"priority": "CRITICAL",
"read_time": read_time,
"variables_count": variables_count
"variables_count": variables_count,
},
)
@@ -927,7 +985,7 @@ class DataStreamer:
{
"dataset_id": dataset_id,
"consecutive_errors": consecutive_errors,
"priority": "CRITICAL"
"priority": "CRITICAL",
},
)
break
@@ -937,9 +995,13 @@ class DataStreamer:
# 🔑 FIXED: Do NOT call stop_dataset_streaming from within the loop
# The thread will be cleaned up externally when needed
if self.logger:
self.logger.info(f"🔥 CRITICAL: Dataset '{dataset_info['name']}' recording loop ended")
self.logger.info(
f"🔥 CRITICAL: Dataset '{dataset_info['name']}' recording loop ended"
)
def _handle_udp_streaming(self, dataset_id: str, dataset_info: dict, all_data: dict):
def _handle_udp_streaming(
self, dataset_id: str, dataset_info: dict, all_data: dict
):
"""Handle UDP streaming in background thread (lower priority) with performance tracking"""
try:
# Get filtered data for streaming - only variables that are in streaming_variables list AND have streaming=true
@@ -956,17 +1018,23 @@ class DataStreamer:
if streaming_data:
try:
self.send_to_plotjuggler(streaming_data)
# 📊 Record successful UDP send
self.performance_monitor.record_udp_send(len(streaming_data), success=True)
self.performance_monitor.record_udp_send(
len(streaming_data), success=True
)
except Exception as udp_error:
# 📊 Record UDP send error
self.performance_monitor.record_udp_send(len(streaming_data), success=False)
self.performance_monitor.record_udp_send(
len(streaming_data), success=False
)
if self.logger:
self.logger.warning(f"UDP streaming error for dataset '{dataset_info['name']}': {udp_error}")
self.logger.warning(
f"UDP streaming error for dataset '{dataset_info['name']}': {udp_error}"
)
if self.logger:
udp_count = len(streaming_data)
self.logger.debug(
@@ -1005,9 +1073,9 @@ class DataStreamer:
target=self.dataset_streaming_loop,
args=(dataset_id,),
dataset_id=dataset_id,
name=f"recording_{dataset_id}"
name=f"recording_{dataset_id}",
)
self.dataset_threads[dataset_id] = thread
thread.start()
@@ -1025,7 +1093,7 @@ class DataStreamer:
if dataset_id in self.dataset_threads:
# Use priority manager to safely stop the recording thread
self.priority_manager.stop_recording_thread(dataset_id, timeout=5.0)
# Clean up the reference
if dataset_id in self.dataset_threads:
del self.dataset_threads[dataset_id]
@@ -1115,10 +1183,10 @@ class DataStreamer:
# 🔥 ENABLE RECORDING PROTECTION MODE
self.recording_protector.start_recording_protection()
# 📊 START PERFORMANCE MONITORING
self.performance_monitor.start_monitoring()
# 🚨 START ASYNC CSV FLUSH THREAD
self.start_csv_flush_thread()
@@ -1163,7 +1231,7 @@ class DataStreamer:
"recording_protection": True,
"performance_monitoring": True,
"async_csv_buffering": True,
"csv_flush_interval": self.csv_flush_interval
"csv_flush_interval": self.csv_flush_interval,
},
)
return True
@@ -1171,13 +1239,13 @@ class DataStreamer:
def stop_csv_recording(self):
"""🔥 CRITICAL: Stop CSV recording safely with performance monitoring and async flush"""
self.csv_recording_enabled = False
# 🚨 STOP ASYNC CSV FLUSH THREAD AND FLUSH REMAINING DATA
self.stop_csv_flush_thread()
# 🔥 DISABLE RECORDING PROTECTION MODE
self.recording_protector.stop_recording_protection()
# 📊 STOP PERFORMANCE MONITORING
self.performance_monitor.stop_monitoring()
@@ -1201,10 +1269,7 @@ class DataStreamer:
"info",
"csv_recording_stopped",
"🔥 CRITICAL: CSV recording stopped (dataset threads continue for UDP streaming)",
{
"recording_protection": False,
"performance_monitoring": False
}
{"recording_protection": False, "performance_monitoring": False},
)
# 🔑 NEW: UDP Streaming Methods (Manual)
@@ -1444,7 +1509,9 @@ class DataStreamer:
"csv_recording_enabled": self.csv_recording_enabled,
# 📊 PERFORMANCE STATS
"performance_current": self.performance_monitor.get_current_stats(),
"performance_historical": self.performance_monitor.get_historical_stats(windows=6), # Last minute
"performance_historical": self.performance_monitor.get_historical_stats(
windows=6
), # Last minute
}
return stats
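The `PerformanceMonitor` class is likewise outside this diff. A minimal sketch consistent with the calls made throughout this file (`set_dataset_interval`, `record_dataset_read`, `record_csv_write`, `record_udp_send`, `get_current_stats`, `get_historical_stats`); the reporting thread implied by `report_interval` is simplified here to a manual `roll_window`, which is an assumption:

```python
import threading
from collections import deque

class PerformanceMonitorSketch:
    """Aggregates per-window counters matching the record_* calls in this diff."""

    def __init__(self, max_windows=360):
        self._lock = threading.Lock()
        self._current = self._empty_window()
        self._history = deque(maxlen=max_windows)  # closed windows, newest last
        self.dataset_intervals = {}

    @staticmethod
    def _empty_window():
        return {"reads": 0, "read_errors": 0, "total_delay": 0.0,
                "csv_writes": 0, "csv_errors": 0,
                "udp_points": 0, "udp_errors": 0}

    def set_dataset_interval(self, dataset_id, interval):
        self.dataset_intervals[dataset_id] = interval

    def record_dataset_read(self, dataset_id, read_time, variables_count,
                            success, delay):
        with self._lock:
            self._current["reads" if success else "read_errors"] += 1
            self._current["total_delay"] += delay

    def record_csv_write(self, dataset_id, write_time, success):
        with self._lock:
            self._current["csv_writes" if success else "csv_errors"] += 1

    def record_udp_send(self, point_count, success):
        with self._lock:
            if success:
                self._current["udp_points"] += point_count
            else:
                self._current["udp_errors"] += 1

    def roll_window(self):
        # The real class presumably rolls windows on a timer (report_interval)
        with self._lock:
            self._history.append(self._current)
            self._current = self._empty_window()

    def get_current_stats(self):
        with self._lock:
            return dict(self._current)

    def get_historical_stats(self, windows=6):
        with self._lock:
            return list(self._history)[-windows:]
```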
@@ -1457,14 +1524,14 @@ class DataStreamer:
"error": "API rate limit exceeded - protecting recording operations",
"error_type": "rate_limited",
"message": "Too many API requests - recording operations have priority",
"retry_after": 1.0
"retry_after": 1.0,
}
# Use background thread for cache access to not block recording
future = self.priority_manager.submit_api_task(
self.get_cached_dataset_values, dataset_id
)
try:
return future.result(timeout=2.0) # 2 second timeout
except Exception as e:
@@ -1472,7 +1539,7 @@ class DataStreamer:
"success": False,
"error": f"API timeout or error: {str(e)}",
"error_type": "api_timeout",
"message": "API request timed out - recording operations have priority"
"message": "API request timed out - recording operations have priority",
}
def perform_csv_cleanup_safe(self):
@@ -1480,9 +1547,11 @@ class DataStreamer:
if self.csv_recording_enabled:
# Don't run cleanup while recording is active - protect recording
if self.logger:
self.logger.info("Skipping CSV cleanup - recording is active (protecting recording operations)")
self.logger.info(
"Skipping CSV cleanup - recording is active (protecting recording operations)"
)
return
# Run cleanup in background thread
self.priority_manager.submit_background_task(self.perform_csv_cleanup)
@@ -1669,48 +1738,52 @@ class DataStreamer:
"""🔑 CRITICAL: Safely shutdown all streaming operations with priority protection and performance monitoring"""
if self.logger:
self.logger.info("🔥 CRITICAL: Starting safe shutdown of data streamer...")
try:
# 1. Stop performance monitoring first
self.performance_monitor.stop_monitoring()
# 2. Stop CSV recording first (graceful stop with buffer flush)
if self.csv_recording_enabled:
self.stop_csv_recording()
else:
# Ensure CSV flush thread is stopped even if recording wasn't active
self.stop_csv_flush_thread()
# 3. Stop UDP streaming
if self.udp_streaming_enabled:
self.stop_udp_streaming()
# 4. Stop all dataset streaming threads using priority manager
active_datasets = list(self.dataset_threads.keys())
for dataset_id in active_datasets:
self.stop_dataset_streaming(dataset_id)
# 5. Shutdown priority manager (will wait for all recording threads)
self.priority_manager.shutdown()
# 6. Clear all cached data
self.clear_cached_values()
# 7. Close any remaining files
for dataset_id in list(self.dataset_csv_files.keys()):
try:
self.dataset_csv_files[dataset_id].close()
except:
pass
self.dataset_csv_files.clear()
self.dataset_csv_writers.clear()
self.dataset_csv_hours.clear()
if self.logger:
self.logger.info("🔥 CRITICAL: Data streamer shutdown completed successfully")
self.logger.info(
"🔥 CRITICAL: Data streamer shutdown completed successfully"
)
except Exception as e:
if self.logger:
self.logger.error(f"🚨 CRITICAL ERROR: Error during data streamer shutdown: {e}")
self.logger.error(
f"🚨 CRITICAL ERROR: Error during data streamer shutdown: {e}"
)
raise
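Several priority-manager calls appear in this file (`submit_background_task`, `submit_api_task`, `stop_recording_thread`, `shutdown`) without the class itself. A minimal sketch of the idea, assuming recording loops exit through their own flags; `create_recording_thread` is an illustrative name, since the creating call is cut off in the thread-start hunk above:

```python
import threading
from concurrent.futures import Future, ThreadPoolExecutor

class PriorityManagerSketch:
    """Recording gets dedicated threads; API and background work go through
    small executors so they can never starve the recording loops."""

    def __init__(self):
        self._recording_threads = {}
        self._background = ThreadPoolExecutor(max_workers=2,
                                              thread_name_prefix="background")
        self._api = ThreadPoolExecutor(max_workers=2, thread_name_prefix="api")

    def create_recording_thread(self, target, args, dataset_id, name):
        thread = threading.Thread(target=target, args=args, name=name, daemon=True)
        self._recording_threads[dataset_id] = thread
        return thread  # caller starts it, as in the thread-start hunk above

    def submit_background_task(self, fn, *args) -> Future:
        return self._background.submit(fn, *args)

    def submit_api_task(self, fn, *args) -> Future:
        return self._api.submit(fn, *args)

    def stop_recording_thread(self, dataset_id, timeout=5.0):
        thread = self._recording_threads.pop(dataset_id, None)
        if thread and thread.is_alive():
            thread.join(timeout=timeout)  # loop exits via its own stop flags

    def shutdown(self):
        self._background.shutdown(wait=True)
        self._api.shutdown(wait=True)
```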


@@ -1,4 +1,5 @@
import React, { useState, useEffect, useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import {
Box,
Card,
@@ -280,6 +281,7 @@ function CollapsiblePlotItemsForm({ data, schema, uiSchema, onSave, title, icon,
* Adapted from PlotManager for historical data analysis
*/
export default function PlotHistoricalManager() {
const { t } = useTranslation()
const [plotDefinitions, setPlotDefinitions] = useState({ plots: [] })
const [plotVariables, setPlotVariables] = useState({ variables: [] })
const [plotDefinitionsSchema, setPlotDefinitionsSchema] = useState(null)
@@ -523,8 +525,8 @@ export default function PlotHistoricalManager() {
setHistoricalSessions(prev => [...prev, newSession])
toast({
title: "Historical Plot Created",
description: `Created historical plot: ${plotDef.name}`,
title: t('historical.plotCreated'),
description: `${t('historical.createdDescription')}: ${plotDef.name}`,
status: "success",
duration: 3000,
isClosable: true
@@ -534,7 +536,7 @@ export default function PlotHistoricalManager() {
console.error('Error creating historical plot:', error)
toast({
title: "Creation Error",
description: error.message || "Failed to create historical plot",
description: error.message || t('historical.createFailed'),
status: "error",
duration: 5000,
isClosable: true
@@ -547,8 +549,8 @@ export default function PlotHistoricalManager() {
const removeHistoricalSession = (sessionId) => {
setHistoricalSessions(prev => prev.filter(s => s.id !== sessionId))
toast({
title: "Session Removed",
description: "Historical plot session removed",
title: t('historical.sessionRemoved'),
description: t('historical.sessionRemovedDescription'),
status: "info",
duration: 2000,
isClosable: true
@@ -574,9 +576,9 @@ export default function PlotHistoricalManager() {
<Flex align="center">
<HStack>
<Text fontSize="2xl">📊</Text>
<Heading size="lg">Historical Plot Manager</Heading>
<Heading size="lg">{t('historical.manager')}</Heading>
<Badge colorScheme="purple" variant="subtle">
CSV Data Analysis
{t('historical.csvAnalysis')}
</Badge>
</HStack>
<Spacer />
@@ -587,14 +589,14 @@ export default function PlotHistoricalManager() {
isLoading={isLoading}
variant="ghost"
>
🔄 Refresh Config
🔄 {t('historical.refreshConfig')}
</Button>
<Button
size="sm"
onClick={loadAvailableVariables}
variant="ghost"
>
🔄 Refresh Variables
🔄 {t('historical.refreshVariables')}
</Button>
<Button
size="sm"
@@ -605,7 +607,7 @@ export default function PlotHistoricalManager() {
}}
variant="ghost"
>
🔍 Debug Data
🔍 {t('historical.debugData')}
</Button>
</HStack>
</Flex>
@@ -615,14 +617,14 @@ export default function PlotHistoricalManager() {
{/* Plot Creation */}
<Card bg={cardBgColor}>
<CardHeader>
<Heading size="md">🎯 Create Historical Plot</Heading>
<Heading size="md">🎯 {t('historical.createPlot')}</Heading>
</CardHeader>
<CardBody>
<HStack spacing={4} align="end">
<FormControl>
<FormLabel fontSize="sm">Select Plot Definition</FormLabel>
<FormLabel fontSize="sm">{t('historical.selectPlotDefinition')}</FormLabel>
<Select
placeholder="Choose a plot..."
placeholder={t('historical.choosePlot')}
value={selectedPlotId}
onChange={(e) => setSelectedPlotId(e.target.value)}
size="sm"
@@ -641,7 +643,7 @@ export default function PlotHistoricalManager() {
isDisabled={!selectedPlotId}
size="sm"
>
📈 Create Plot
📈 {t('historical.createButton')}
</Button>
</HStack>
</CardBody>
@@ -653,9 +655,9 @@ export default function PlotHistoricalManager() {
<Flex align="center">
<HStack>
<Text fontSize="lg">📈</Text>
<Heading size="md">Historical Plot Sessions</Heading>
<Heading size="md">{t('historical.sessions')}</Heading>
<Badge colorScheme="blue" variant="subtle">
{historicalSessions.length} active
{historicalSessions.length} {t('historical.active')}
</Badge>
</HStack>
<Spacer />
@@ -665,7 +667,7 @@ export default function PlotHistoricalManager() {
onClick={() => setIsSessionsExpanded(!isSessionsExpanded)}
leftIcon={isSessionsExpanded ? <ChevronUpIcon /> : <ChevronDownIcon />}
>
{isSessionsExpanded ? 'Collapse' : 'Expand'}
{isSessionsExpanded ? t('ui.collapse') : t('ui.expand')}
</Button>
</Flex>
</CardHeader>
@@ -687,10 +689,10 @@ export default function PlotHistoricalManager() {
) : (
<Box textAlign="center" py={8}>
<Text color="gray.500" mb={3}>
No historical plot sessions active.
{t('historical.noSessions')}
</Text>
<Text fontSize="sm" color="gray.400">
Select a plot definition and create your first historical plot to analyze CSV data.
{t('historical.noSessionsHelp')}
</Text>
</Box>
)}


@@ -1,4 +1,5 @@
import React, { useEffect, useRef, useState, useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import {
Box,
VStack,
@@ -58,10 +59,11 @@ import * as api from '../services/api'
*/
export default function PlotHistoricalSession({
session,
onRemove,
availableVariables = [],
booleanVariables = []
onRemove,
availableVariables = [],
booleanVariables = []
}) {
const { t } = useTranslation();
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState(null)
const [historicalData, setHistoricalData] = useState([])
@@ -632,8 +634,8 @@ export default function PlotHistoricalSession({
<Box mt={2} p={2} bg={infoBgColor} borderRadius="md" border="1px solid" borderColor={borderColor}>
<HStack justify="space-between" fontSize="sm" color={textColor}>
<Text><strong>Range:</strong> {timeRangeSeconds}s</Text>
<Text><strong>From:</strong> {formatCentralTimeInfo().start}</Text>
<Text><strong>To:</strong> {formatCentralTimeInfo().end}</Text>
<Text><strong>{t('timeSelector.from')}:</strong> {formatCentralTimeInfo().start}</Text>
<Text><strong>{t('timeSelector.to')}:</strong> {formatCentralTimeInfo().end}</Text>
</HStack>
</Box>
</Box>
@@ -666,7 +668,7 @@ export default function PlotHistoricalSession({
<Text color={textColor}>{dataStats.loadTime}ms</Text>
</Box>
<Box>
<Text fontWeight="medium" color={textColor}>Time Range:</Text>
<Text fontWeight="medium" color={textColor}>{t('timeSelector.timeRange')}:</Text>
<Text color={textColor}>{formatTimeRange(dataStats.timeRange)}</Text>
</Box>
</Grid>
@@ -727,7 +729,7 @@ export default function PlotHistoricalSession({
{/* Time Range Configuration */}
<Box>
<Text fontWeight="medium" mb={3}>📅 Time Range</Text>
<Text fontWeight="medium" mb={3}>📅 {t('timeSelector.timeRange')}</Text>
<Grid templateColumns="1fr 1fr" gap={4}>
<FormControl>
<FormLabel fontSize="sm">Central Time</FormLabel>
@@ -946,8 +948,8 @@ export default function PlotHistoricalSession({
<Box px={3} py={1} bg={highlightBg} borderRadius="md" fontSize="xs">
<HStack spacing={4}>
<Text><strong>Range:</strong> {timeRangeSeconds}s</Text>
<Text><strong>From:</strong> {formatCentralTimeInfo().start}</Text>
<Text><strong>To:</strong> {formatCentralTimeInfo().end}</Text>
<Text><strong>{t('timeSelector.from')}:</strong> {formatCentralTimeInfo().start}</Text>
<Text><strong>{t('timeSelector.to')}:</strong> {formatCentralTimeInfo().end}</Text>
</HStack>
</Box>
</HStack>


@@ -1,4 +1,5 @@
import { useMemo, useState, useCallback, useRef, useEffect } from "react";
import { useTranslation } from 'react-i18next';
import { Box, Flex, Text, Slider, SliderTrack, SliderFilledTrack, SliderThumb, Button, IconButton, useColorModeValue, NumberInput, NumberInputField, NumberInputStepper, NumberIncrementStepper, NumberDecrementStepper } from "@chakra-ui/react";
import { CheckIcon } from "@chakra-ui/icons";
import DatePicker from "react-datepicker";
@@ -14,6 +15,7 @@ export default function TimePointSelector({
dataSegments = [],
onTimeChange,
}) {
const { t } = useTranslation();
// Color mode values
const bgColor = useColorModeValue('gray.50', 'gray.700');
const borderColor = useColorModeValue('gray.200', 'gray.600');
@@ -163,7 +165,7 @@ export default function TimePointSelector({
<Box p={4} bg={bgColor} borderRadius="md" border="1px solid" borderColor={borderColor}>
<Flex gap={4} align="center" mb={3} wrap="wrap">
<Box>
<Text fontWeight="semibold" mb={1} color={textColor}>Select Date and Time</Text>
<Text fontWeight="semibold" mb={1} color={textColor}>{t('timeSelector.selectDateTime')}</Text>
<Box
sx={{
'& .react-datepicker-wrapper': {
@@ -252,7 +254,7 @@ export default function TimePointSelector({
</Box>
<Box flex="1" minW="260px">
<Text color={textColor} mb={1}>Navigate with slider</Text>
<Text color={textColor} mb={1}>{t('timeSelector.navigateSlider')}</Text>
{/* Slider with integrated data availability */}
<Box position="relative" mb={2}>
@@ -302,7 +304,7 @@ export default function TimePointSelector({
</Text>
{hasPendingChanges && (
<Text fontSize="xs" color="orange.500">
Pending changes
{t('timeSelector.pendingChanges')}
</Text>
)}
</Flex>
@@ -311,7 +313,7 @@ export default function TimePointSelector({
<Flex mt={3} gap={3} align="center" wrap="wrap">
<Box>
<Text fontSize="sm" fontWeight="medium" color={textColor} mb={1}>
Time Range (seconds)
{t('timeSelector.timeRangeSeconds')}
</Text>
<NumberInput
value={tempRangeSeconds}
@@ -332,7 +334,7 @@ export default function TimePointSelector({
{hasPendingChanges && (
<Box>
<Text fontSize="sm" color="orange.500" mb={1}>
Pending changes
{t('timeSelector.pendingChanges')}
</Text>
<Button
size="sm"
@@ -340,7 +342,7 @@ export default function TimePointSelector({
leftIcon={<CheckIcon />}
onClick={applyPendingChanges}
>
Apply Changes
{t('timeSelector.applyChanges')}
</Button>
</Box>
)}


@@ -100,5 +100,63 @@
"refresh": "Refresh",
"noLogs": "No logs available",
"loading": "Loading logs..."
},
"historical": {
"title": "Historical Plots",
"manager": "Historical Plot Manager",
"csvAnalysis": "CSV Data Analysis",
"createPlot": "Create Historical Plot",
"selectPlotDefinition": "Select Plot Definition",
"choosePlot": "Choose a plot...",
"createButton": "Create Plot",
"sessions": "Historical Plot Sessions",
"active": "active",
"noSessions": "No historical plot sessions active.",
"noSessionsHelp": "Select a plot definition and create your first historical plot to analyze CSV data.",
"plotCreated": "Historical Plot Created",
"createdDescription": "Created historical plot",
"sessionRemoved": "Session Removed",
"sessionRemovedDescription": "Historical plot session removed",
"createFailed": "Failed to create historical plot",
"refreshConfig": "Refresh Config",
"refreshVariables": "Refresh Variables",
"debugData": "Debug Data"
},
"csvFiles": {
"title": "CSV Files",
"browser": "CSV File Browser",
"loading": "Loading files...",
"noFiles": "No files available",
"download": "Download",
"openWith": "Open with",
"plotJuggler": "PlotJuggler",
"fileSize": "File size",
"lastModified": "Last modified",
"records": "records"
},
"ui": {
"collapse": "Collapse",
"expand": "Expand",
"addNew": "Add New",
"addFirst": "Add First",
"items": "items",
"defined": "defined",
"yet": "yet",
"choose": "Choose",
"select": "Select"
},
"timeSelector": {
"selectDateTime": "Select Date and Time",
"navigateSlider": "Navigate with slider",
"timeRange": "Time Range",
"timeRangeSeconds": "Time Range (seconds)",
"pendingChanges": "Pending changes",
"applyChanges": "Apply Changes",
"from": "From",
"to": "To",
"range": "Range",
"duration": "Duration",
"time": "Time",
"date": "Date"
}
}


@@ -100,5 +100,63 @@
"refresh": "Actualizar",
"noLogs": "No hay registros disponibles",
"loading": "Cargando registros..."
},
"historical": {
"title": "Gráficos Históricos",
"manager": "Gestor de Gráficos Históricos",
"csvAnalysis": "Análisis de Datos CSV",
"createPlot": "Crear Gráfico Histórico",
"selectPlotDefinition": "Seleccionar Definición de Gráfico",
"choosePlot": "Elegir un gráfico...",
"createButton": "Crear Gráfico",
"sessions": "Sesiones de Gráficos Históricos",
"active": "activo",
"noSessions": "No hay sesiones de gráficos históricos activas.",
"noSessionsHelp": "Selecciona una definición de gráfico y crea tu primer gráfico histórico para analizar datos CSV.",
"plotCreated": "Gráfico Histórico Creado",
"createdDescription": "Gráfico histórico creado",
"sessionRemoved": "Sesión Eliminada",
"sessionRemovedDescription": "Sesión de gráfico histórico eliminada",
"createFailed": "Error al crear gráfico histórico",
"refreshConfig": "Actualizar Configuración",
"refreshVariables": "Actualizar Variables",
"debugData": "Datos de Depuración"
},
"csvFiles": {
"title": "Archivos CSV",
"browser": "Explorador de Archivos CSV",
"loading": "Cargando archivos...",
"noFiles": "No hay archivos disponibles",
"download": "Descargar",
"openWith": "Abrir con",
"plotJuggler": "PlotJuggler",
"fileSize": "Tamaño del archivo",
"lastModified": "Última modificación",
"records": "registros"
},
"ui": {
"collapse": "Colapsar",
"expand": "Expandir",
"addNew": "Agregar Nuevo",
"addFirst": "Agregar Primero",
"items": "elementos",
"defined": "definido",
"yet": "aún",
"choose": "Elegir",
"select": "Seleccionar"
},
"timeSelector": {
"selectDateTime": "Seleccionar Fecha y Hora",
"navigateSlider": "Navegar con control deslizante",
"timeRange": "Rango de Tiempo",
"timeRangeSeconds": "Rango de Tiempo (segundos)",
"pendingChanges": "Cambios pendientes",
"applyChanges": "Aplicar Cambios",
"from": "Desde",
"to": "Hasta",
"range": "Rango",
"duration": "Duración",
"time": "Hora",
"date": "Fecha"
}
}


@@ -100,5 +100,63 @@
"refresh": "Aggiorna",
"noLogs": "Nessun log disponibile",
"loading": "Caricamento log..."
},
"historical": {
"title": "Grafici Storici",
"manager": "Gestore Grafici Storici",
"csvAnalysis": "Analisi Dati CSV",
"createPlot": "Crea Grafico Storico",
"selectPlotDefinition": "Seleziona Definizione Grafico",
"choosePlot": "Scegli un grafico...",
"createButton": "Crea Grafico",
"sessions": "Sessioni Grafici Storici",
"active": "attivo",
"noSessions": "Nessuna sessione di grafici storici attiva.",
"noSessionsHelp": "Seleziona una definizione di grafico e crea il tuo primo grafico storico per analizzare i dati CSV.",
"plotCreated": "Grafico Storico Creato",
"createdDescription": "Grafico storico creato",
"sessionRemoved": "Sessione Rimossa",
"sessionRemovedDescription": "Sessione grafico storico rimossa",
"createFailed": "Errore nella creazione del grafico storico",
"refreshConfig": "Aggiorna Configurazione",
"refreshVariables": "Aggiorna Variabili",
"debugData": "Dati di Debug"
},
"csvFiles": {
"title": "File CSV",
"browser": "Browser File CSV",
"loading": "Caricamento file...",
"noFiles": "Nessun file disponibile",
"download": "Scarica",
"openWith": "Apri con",
"plotJuggler": "PlotJuggler",
"fileSize": "Dimensione file",
"lastModified": "Ultima modifica",
"records": "record"
},
"ui": {
"collapse": "Comprimi",
"expand": "Espandi",
"addNew": "Aggiungi Nuovo",
"addFirst": "Aggiungi Primo",
"items": "elementi",
"defined": "definito",
"yet": "ancora",
"choose": "Scegli",
"select": "Seleziona"
},
"timeSelector": {
"selectDateTime": "Seleziona Data e Ora",
"navigateSlider": "Naviga con cursore",
"timeRange": "Intervallo di Tempo",
"timeRangeSeconds": "Intervallo di Tempo (secondi)",
"pendingChanges": "Modifiche in sospeso",
"applyChanges": "Applica Modifiche",
"from": "Da",
"to": "A",
"range": "Intervallo",
"duration": "Durata",
"time": "Ora",
"date": "Data"
}
}


@@ -1723,6 +1723,7 @@ function DatasetManager() {
// Console Logs Display Component
function ConsoleLogsDisplay({ logs, loading, onRefresh }) {
const { t } = useTranslation()
// All hooks must be called at the top level
const cardBg = useColorModeValue('white', 'gray.700')
const logBg = useColorModeValue('gray.50', 'gray.800')
@@ -1753,7 +1754,7 @@ function ConsoleLogsDisplay({ logs, loading, onRefresh }) {
<CardBody>
<Flex align="center" justify="center" py={4}>
<Spinner mr={3} />
<Text>Loading console logs...</Text>
<Text>{t('logs.loading')}</Text>
</Flex>
</CardBody>
</Card>
@@ -1764,7 +1765,7 @@ function ConsoleLogsDisplay({ logs, loading, onRefresh }) {
<Card bg={cardBg}>
<CardHeader>
<Flex align="center">
<Heading size="md">📋 Console Logs</Heading>
<Heading size="md">📋 {t('logs.title')}</Heading>
<Spacer />
<HStack spacing={2}>
<Button
@@ -1776,7 +1777,7 @@ function ConsoleLogsDisplay({ logs, loading, onRefresh }) {
📄 Export
</Button>
<Button size="sm" variant="outline" onClick={onRefresh}>
🔄 Refresh
🔄 {t('logs.refresh')}
</Button>
</HStack>
</Flex>
@@ -1784,7 +1785,7 @@ function ConsoleLogsDisplay({ logs, loading, onRefresh }) {
<CardBody>
{(!logs || logs.length === 0) ? (
<Text textAlign="center" py={4} color="gray.500">
No console logs found
{t('logs.noLogs')}
</Text>
) : (
<Box
@@ -1856,6 +1857,7 @@ export default function Dashboard() {
// Dashboard Content Component (separated to use context)
function DashboardContent() {
const { t } = useTranslation()
// PLC configuration state
const [schemaData, setSchemaData] = useState(null)
const [formData, setFormData] = useState(null)
@@ -1953,12 +1955,12 @@ function DashboardContent() {
<Tabs variant="enclosed" colorScheme="orange" defaultIndex={2}>
<TabList>
<Tab>🔧 Configuration</Tab>
<Tab>📊 Datasets</Tab>
<Tab>📈 Plotting</Tab>
<Tab>📉 Historical Plots</Tab>
<Tab>📁 CSV Files</Tab>
<Tab>📋 Console Logs</Tab>
<Tab>🔧 {t('config.title')}</Tab>
<Tab>📊 {t('datasets.title')}</Tab>
<Tab>📈 {t('plots.title')}</Tab>
<Tab>📉 {t('historical.title')}</Tab>
<Tab>📁 {t('csvFiles.title')}</Tab>
<Tab>📋 {t('logs.title')}</Tab>
</TabList>
<TabPanels>


@@ -1,9 +1,13 @@
{
"last_state": {
"should_connect": false,
"should_connect": true,
"should_stream": false,
"active_datasets": []
"active_datasets": [
"Fast",
"DAR"
]
},
"auto_recovery_enabled": true,
"last_update": "2025-08-17T11:41:21.785119"
"last_update": "2025-08-17T12:13:12.849060",
"plotjuggler_path": "C:\\Program Files\\PlotJuggler\\plotjuggler.exe"
}
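For context, a sketch of how a startup routine might consume this persisted state for auto-recovery; the commit does not show this consumer, so the function and the `connect`/`start_dataset_streaming` calls are assumptions based on names appearing elsewhere in the diff:

```python
import json

def restore_system_state(streamer, path="system_state.json"):
    """Reconnect and restart the datasets that were active at last shutdown."""
    with open(path, "r", encoding="utf-8") as f:
        state = json.load(f)
    if not state.get("auto_recovery_enabled", False):
        return
    last = state.get("last_state", {})
    if last.get("should_connect"):
        streamer.plc_client.connect()  # assumed connect entry point
        for dataset_id in last.get("active_datasets", []):
            # counterpart of stop_dataset_streaming seen in the diff (assumed)
            streamer.start_dataset_streaming(dataset_id)
```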