diff --git a/application_events.json b/application_events.json
index 0e2a4d2..f8595c5 100644
--- a/application_events.json
+++ b/application_events.json
@@ -8219,8 +8219,258 @@
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:04:58.577892",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:19:49.627904",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:27:39.324986",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:43:47.182067",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:48:02.324605",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T11:55:16.082269",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:06:39.185973",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:10:04.492953",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:15:30.073387",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: DAR",
+ "details": {
+ "dataset_id": "DAR",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "gateway_phoenix"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:15:30.095330",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: Fast",
+ "details": {
+ "dataset_id": "Fast",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "fast"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:15:30.115826",
+ "level": "info",
+ "event_type": "csv_recording_started",
+ "message": "CSV recording started: 2 datasets activated",
+ "details": {
+ "activated_datasets": 2,
+ "total_datasets": 3
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:15:30.136928",
+ "level": "info",
+ "event_type": "plc_connection",
+ "message": "Successfully connected to PLC 10.1.33.11 and auto-started CSV recording for 3 datasets",
+ "details": {
+ "ip": "10.1.33.11",
+ "rack": 0,
+ "slot": 2,
+ "symbols_path": "C:/Users/migue/Downloads/symSAE452.asc",
+ "auto_started_recording": true,
+ "recording_datasets": 3,
+ "dataset_names": [
+ "test",
+ "Fast",
+ "DAR"
+ ]
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:15:42.775388",
+ "level": "info",
+ "event_type": "plot_session_created",
+ "message": "Plot session 'UR29' created and started",
+ "details": {
+ "session_id": "plot_1_1755339342774_2",
+ "variables": [
+ "UR29_Brix",
+ "UR29_ma",
+ "AUX Blink_1.0S",
+ "AUX Blink_1.6S"
+ ],
+ "time_window": 36,
+ "trigger_variable": null,
+ "auto_started": true
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:21:32.502577",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:21:32.569959",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: DAR",
+ "details": {
+ "dataset_id": "DAR",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "gateway_phoenix"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:21:32.581865",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: Fast",
+ "details": {
+ "dataset_id": "Fast",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "fast"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:21:32.594474",
+ "level": "info",
+ "event_type": "csv_recording_started",
+ "message": "CSV recording started: 2 datasets activated",
+ "details": {
+ "activated_datasets": 2,
+ "total_datasets": 3
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:30:07.191405",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:30:07.241616",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: DAR",
+ "details": {
+ "dataset_id": "DAR",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "gateway_phoenix"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:30:07.253448",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: Fast",
+ "details": {
+ "dataset_id": "Fast",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "fast"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:30:07.264199",
+ "level": "info",
+ "event_type": "csv_recording_started",
+ "message": "CSV recording started: 2 datasets activated",
+ "details": {
+ "activated_datasets": 2,
+ "total_datasets": 3
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:35:14.697258",
+ "level": "info",
+ "event_type": "application_started",
+ "message": "Application initialization completed successfully",
+ "details": {}
+ },
+ {
+ "timestamp": "2025-08-16T12:35:14.761107",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: DAR",
+ "details": {
+ "dataset_id": "DAR",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "gateway_phoenix"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:35:14.774962",
+ "level": "info",
+ "event_type": "dataset_activated",
+ "message": "Dataset activated: Fast",
+ "details": {
+ "dataset_id": "Fast",
+ "variables_count": 2,
+ "streaming_count": 2,
+ "prefix": "fast"
+ }
+ },
+ {
+ "timestamp": "2025-08-16T12:35:14.785650",
+ "level": "info",
+ "event_type": "csv_recording_started",
+ "message": "CSV recording started: 2 datasets activated",
+ "details": {
+ "activated_datasets": 2,
+ "total_datasets": 3
+ }
}
],
- "last_updated": "2025-08-16T10:01:50.009210",
- "total_entries": 685
+ "last_updated": "2025-08-16T12:35:14.785650",
+ "total_entries": 710
}
\ No newline at end of file
diff --git a/frontend/package.json b/frontend/package.json
index 1b3c722..7c38520 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -22,6 +22,7 @@
"framer-motion": "^11.2.12",
"luxon": "^2.5.2",
"react": "^18.2.0",
+ "react-datepicker": "^8.5.0",
"react-dom": "^18.2.0",
"react-icons": "^5.5.0",
"react-router-dom": "^6.26.1"
diff --git a/frontend/src/components/ChartjsHistoricalPlot.jsx b/frontend/src/components/ChartjsHistoricalPlot.jsx
index fb832e9..d22de40 100644
--- a/frontend/src/components/ChartjsHistoricalPlot.jsx
+++ b/frontend/src/components/ChartjsHistoricalPlot.jsx
@@ -110,6 +110,7 @@ const ChartjsHistoricalPlot = ({
const createOrUpdateChart = useCallback(() => {
if (!canvasRef.current || !historicalData || historicalData.length === 0) {
+ console.log('π Chart creation skipped - missing canvas or data');
return;
}
@@ -120,17 +121,40 @@ const ChartjsHistoricalPlot = ({
chartRef.current = null;
}
+ console.log(`π Processing ${historicalData.length} historical data points for variables:`, session?.variables);
+
// Process historical data into Chart.js format
const processedData = processHistoricalData(historicalData, session?.variables || []);
setDataPointsCount(historicalData.length);
if (processedData.datasets.length === 0) {
+ console.log('π No datasets created from data');
setError('No valid data to display');
return;
}
+ console.log(`π Created ${processedData.datasets.length} datasets:`, processedData.datasets.map(d => ({ label: d.label, points: d.data.length })));
+
+ // Log sample data for debugging
+ processedData.datasets.forEach((dataset, index) => {
+ if (dataset.data.length > 0) {
+ console.log(`π Dataset ${index} (${dataset.label}):`, {
+ totalPoints: dataset.data.length,
+ firstPoint: dataset.data[0],
+ lastPoint: dataset.data[dataset.data.length - 1],
+ samplePoints: dataset.data.slice(0, 3)
+ });
+ }
+ });
+
// Create chart configuration
const chartConfig = createChartConfig(processedData, config, isZoomEnabled);
+
+ console.log('π Chart config created:', {
+ datasetCount: chartConfig.data.datasets.length,
+ hasData: chartConfig.data.datasets.some(d => d.data.length > 0),
+ decimationEnabled: chartConfig.options.plugins.decimation?.enabled
+ });
// Create new chart
const ctx = canvasRef.current.getContext('2d');
@@ -151,6 +175,8 @@ const ChartjsHistoricalPlot = ({
}, [historicalData, session?.variables, config, isZoomEnabled]);
const processHistoricalData = (data, variables) => {
+ console.log(`π Processing data - Input: ${data.length} points, Variables: [${variables.join(', ')}]`);
+
const datasets = [];
const colors = [
'#FF6384', '#36A2EB', '#FFCE56', '#4BC0C0', '#9966FF',
@@ -167,12 +193,15 @@ const ChartjsHistoricalPlot = ({
y: point.value
}))
.sort((a, b) => a.x - b.x);
+
+ console.log(`π Variable ${variable}: ${variableData[variable].length} points after processing`);
});
// Create datasets for each variable
variables.forEach((variable, index) => {
const points = variableData[variable];
if (points && points.length > 0) {
+ console.log(`π Dataset for ${variable}: ${points.length} points`);
datasets.push({
label: variable,
data: points,
@@ -181,20 +210,18 @@ const ChartjsHistoricalPlot = ({
borderWidth: 2,
fill: false,
tension: 0.1,
- pointRadius: 0, // Always hide points - decimation will handle visualization
+ pointRadius: 0, // Let decimation handle point display
pointHoverRadius: 4,
pointBackgroundColor: colors[index % colors.length],
pointBorderColor: colors[index % colors.length],
- spanGaps: true,
- // Enable parsing for decimation
- parsing: {
- xAxisKey: 'x',
- yAxisKey: 'y'
- }
+ spanGaps: true
});
+ } else {
+ console.log(`π No data for variable: ${variable}`);
}
});
+ console.log(`π Final result: ${datasets.length} datasets created`);
return { datasets };
};
@@ -202,6 +229,12 @@ const ChartjsHistoricalPlot = ({
const minTime = currentTimeRange?.start ? new Date(currentTimeRange.start) : null;
const maxTime = currentTimeRange?.end ? new Date(currentTimeRange.end) : null;
+ console.log('π Chart time range:', {
+ minTime: minTime?.toISOString(),
+ maxTime: maxTime?.toISOString(),
+ currentTimeRange
+ });
+
const config = {
type: 'line',
data: data,
@@ -215,24 +248,6 @@ const ChartjsHistoricalPlot = ({
intersect: false,
mode: 'index'
},
- // Enable decimation for better performance with large datasets
- datasets: {
- line: {
- parsing: {
- xAxisKey: 'x',
- yAxisKey: 'y'
- }
- }
- },
- elements: {
- point: {
- radius: function(context) {
- // Hide points if there are many data points
- const dataset = context.dataset;
- return dataset && dataset.data && dataset.data.length > 1000 ? 0 : 2;
- }
- }
- },
plugins: {
// Configure decimation for better performance with large datasets
decimation: {
@@ -416,15 +431,10 @@ const ChartjsHistoricalPlot = ({
{isZoomEnabled && (
diff --git a/frontend/src/components/PlotHistoricalSession.jsx b/frontend/src/components/PlotHistoricalSession.jsx
index 331f660..6301a60 100644
--- a/frontend/src/components/PlotHistoricalSession.jsx
+++ b/frontend/src/components/PlotHistoricalSession.jsx
@@ -47,6 +47,7 @@ import {
} from '@chakra-ui/react'
import { SettingsIcon, RepeatIcon, ViewIcon, DeleteIcon, TimeIcon, CalendarIcon } from '@chakra-ui/icons'
import ChartjsHistoricalPlot from './ChartjsHistoricalPlot.jsx'
+import TimePointSelector from './TimePointSelector.jsx'
import * as api from '../services/api'
/**
@@ -70,6 +71,23 @@ export default function PlotHistoricalSession({
loadTime: null
})
+ // NEW: Time navigation state (centro + rango en segundos)
+ const [centralTime, setCentralTime] = useState(() => {
+ // Default: 10 minutos atrΓ‘s desde ahora
+ return new Date(Date.now() - 10 * 60 * 1000)
+ })
+ const [timeRangeSeconds, setTimeRangeSeconds] = useState(1000) // 500 seg atrΓ‘s + 500 seg adelante
+ const [dateRange, setDateRange] = useState(null) // Min/max dates disponibles del backend
+
+ // Derived time range for data loading
+ const derivedTimeRange = useMemo(() => {
+ const halfRange = timeRangeSeconds / 2
+ return {
+ start: new Date(centralTime.getTime() - halfRange * 1000),
+ end: new Date(centralTime.getTime() + halfRange * 1000)
+ }
+ }, [centralTime, timeRangeSeconds])
+
// UI state
const [isExpanded, setIsExpanded] = useState(true)
const [isConfigOpen, setIsConfigOpen] = useState(false)
@@ -89,30 +107,48 @@ export default function PlotHistoricalSession({
return fallback
}
- const [localTimeRange, setLocalTimeRange] = useState(() => {
- const defaultStart = new Date(Date.now() - 24 * 60 * 60 * 1000)
- const defaultEnd = new Date()
-
- if (session.timeRange) {
- return {
- start: ensureValidDate(session.timeRange.start, defaultStart),
- end: ensureValidDate(session.timeRange.end, defaultEnd)
- }
- }
-
- return { start: defaultStart, end: defaultEnd }
- })
-
const toast = useToast()
const { isOpen: isConfigModalOpen, onOpen: onConfigModalOpen, onClose: onConfigModalClose } = useDisclosure()
// Keep track of the last loaded data range for optimization
const [loadedDataRange, setLoadedDataRange] = useState(null)
- // Load historical data on component mount and when time range changes
+ // Load date range from backend on mount
useEffect(() => {
- loadHistoricalData()
- }, [session.id, localTimeRange])
+ loadDateRange()
+ }, [])
+
+ // Load historical data when derived time range changes
+ useEffect(() => {
+ if (dateRange) { // Only load after we have date range
+ loadHistoricalData()
+ }
+ }, [session.id, derivedTimeRange, dateRange])
+
+ const loadDateRange = async () => {
+ try {
+ const response = await api.getHistoricalDateRange()
+ if (response.success) {
+ const minDate = new Date(response.date_range.min_date)
+ const maxDate = new Date(response.date_range.max_date)
+ setDateRange({ minDate, maxDate })
+
+ // Adjust central time if it's outside the available range
+ const currentTime = centralTime.getTime()
+ const minTime = minDate.getTime()
+ const maxTime = maxDate.getTime()
+
+ if (currentTime < minTime || currentTime > maxTime) {
+ // Set to middle of available range
+ const middleTime = new Date((minTime + maxTime) / 2)
+ setCentralTime(middleTime)
+ }
+ }
+ } catch (error) {
+ console.error('Error loading date range:', error)
+ setError('Could not load available date range')
+ }
+ }
// Function to check if a range is contained within another range
const isRangeContained = (newRange, existingRange) => {
@@ -132,9 +168,9 @@ export default function PlotHistoricalSession({
}
// Check if the new range is contained within the previously loaded range
- if (!forceReload && loadedDataRange && isRangeContained(localTimeRange, loadedDataRange)) {
+ if (!forceReload && loadedDataRange && isRangeContained(derivedTimeRange, loadedDataRange)) {
console.log('π Zoom optimization: New range is contained within loaded data, skipping reload')
- console.log('π New range:', localTimeRange)
+ console.log('π New range:', derivedTimeRange)
console.log('π Loaded range:', loadedDataRange)
return
}
@@ -147,13 +183,13 @@ export default function PlotHistoricalSession({
const startTime = performance.now()
// Calculate time window in seconds
- const timeWindowSeconds = Math.floor((localTimeRange.end - localTimeRange.start) / 1000)
+ const timeWindowSeconds = Math.floor((derivedTimeRange.end - derivedTimeRange.start) / 1000)
const requestData = {
variables: session.variables,
time_window: timeWindowSeconds,
- start_time: localTimeRange.start.toISOString(),
- end_time: localTimeRange.end.toISOString()
+ start_time: derivedTimeRange.start.toISOString(),
+ end_time: derivedTimeRange.end.toISOString()
}
console.log('π Loading historical data for session:', session.id)
@@ -174,12 +210,21 @@ export default function PlotHistoricalSession({
const loadTime = Math.round(endTime - startTime)
if (response.data) {
+ console.log('π Historical data response:', {
+ dataLength: response.data.length,
+ totalPoints: response.total_points,
+ variablesFound: response.variables_found,
+ timeRange: response.time_range,
+ cached: response.cached,
+ sampleData: response.data.slice(0, 3) // First 3 points for debug
+ })
+
setHistoricalData(response.data)
// Update the loaded data range for optimization
setLoadedDataRange({
- start: new Date(localTimeRange.start),
- end: new Date(localTimeRange.end)
+ start: new Date(derivedTimeRange.start),
+ end: new Date(derivedTimeRange.end)
})
setDataStats({
@@ -193,16 +238,10 @@ export default function PlotHistoricalSession({
points: response.data.length,
variables: response.variables_found,
loadTime,
- loadedRange: localTimeRange
+ loadedRange: derivedTimeRange
})
- toast({
- title: "Data Loaded",
- description: `Loaded ${response.data.length} data points in ${loadTime}ms`,
- status: "success",
- duration: 2000,
- isClosable: true
- })
+ // Removed toast notification to avoid spam during slider changes
} else {
setError('No data received from server')
}
@@ -210,23 +249,22 @@ export default function PlotHistoricalSession({
} catch (error) {
console.error('Error loading historical data:', error)
setError(error.message || 'Failed to load historical data')
- toast({
- title: "Data Load Error",
- description: error.message || "Failed to load historical data",
- status: "error",
- duration: 5000,
- isClosable: true
- })
+ // Only show error toast for actual errors, not for routine data loading
+ if (!error.message?.includes('contained within loaded data')) {
+ toast({
+ title: "Data Load Error",
+ description: error.message || "Failed to load historical data",
+ status: "error",
+ duration: 5000,
+ isClosable: true
+ })
+ }
} finally {
setIsLoading(false)
setLoadingProgress(0)
}
}
- const handleTimeRangeChange = (newTimeRange) => {
- setLocalTimeRange(newTimeRange)
- }
-
const handleConfigSave = () => {
setConfig({ ...config })
onConfigModalClose()
@@ -238,31 +276,47 @@ export default function PlotHistoricalSession({
const handleZoomToTimeRange = (start, end) => {
console.log('π Zoom event - evaluating range:', { start, end })
- const newRange = { start: new Date(start), end: new Date(end) }
+ const newStart = new Date(start)
+ const newEnd = new Date(end)
- // Check if the new range is contained within the loaded data
- if (loadedDataRange && isRangeContained(newRange, loadedDataRange)) {
- console.log('π Zoom optimization: Range contained in loaded data, skipping reload')
- setLocalTimeRange(newRange)
- } else {
- console.log('π Zoom requires data reload - new range outside loaded data')
- setLocalTimeRange(newRange)
- }
+ // Calculate new central time and range from zoom
+ const newCentralTime = new Date((newStart.getTime() + newEnd.getTime()) / 2)
+ const newRangeSeconds = Math.floor((newEnd.getTime() - newStart.getTime()) / 1000)
+
+ console.log('π New central time:', newCentralTime, 'Range seconds:', newRangeSeconds)
+
+ // Update time navigation state
+ setCentralTime(newCentralTime)
+ setTimeRangeSeconds(newRangeSeconds)
}
const handlePanToTimeRange = (start, end) => {
console.log('π Pan event - evaluating range:', { start, end })
- const newRange = { start: new Date(start), end: new Date(end) }
+ const newStart = new Date(start)
+ const newEnd = new Date(end)
- // Pan always requires checking if we need new data
- console.log('π Pan event - loading data for range:', newRange)
- setLocalTimeRange(newRange)
+ // Calculate new central time (keep same range)
+ const newCentralTime = new Date((newStart.getTime() + newEnd.getTime()) / 2)
+
+ console.log('π Pan to central time:', newCentralTime)
+
+ // Update only central time, keep same range
+ setCentralTime(newCentralTime)
+ }
+
+ // Handle time change from TimePointSelector
+ const handleTimePointChange = (newCentralTime) => {
+ console.log('π Time selector change:', newCentralTime)
+ setCentralTime(newCentralTime)
}
// Color mode
const bgColor = useColorModeValue('white', 'gray.800')
const borderColor = useColorModeValue('gray.200', 'gray.600')
const textColor = useColorModeValue('gray.600', 'gray.300')
+ const infoBgColor = useColorModeValue('gray.50', 'gray.700')
+ const subtleTextColor = useColorModeValue('gray.500', 'gray.400')
+ const smallTextColor = useColorModeValue('gray.400', 'gray.500')
// Format time range for display
const formatTimeRange = (timeRange) => {
@@ -272,6 +326,38 @@ export default function PlotHistoricalSession({
return `${start} β ${end}`
}
+ // Format central time and range for display
+ const formatCentralTimeInfo = () => {
+ const halfRange = timeRangeSeconds / 2
+ return {
+ central: centralTime.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ }),
+ range: `Β±${halfRange}s (${timeRangeSeconds}s total)`,
+ start: derivedTimeRange.start.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ }),
+ end: derivedTimeRange.end.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ })
+ }
+ }
+
// Get status color
const getStatusColor = () => {
if (isLoading) return 'yellow'
@@ -297,9 +383,13 @@ export default function PlotHistoricalSession({
)}
+
+
+ Centro: {formatCentralTimeInfo().central}
+
- {formatTimeRange(dataStats.timeRange || localTimeRange)}
+ {formatCentralTimeInfo().range}
π
@@ -376,6 +466,26 @@ export default function PlotHistoricalSession({
{isExpanded && (
+ {/* Time Navigation Controls */}
+ {dateRange && (
+
+
+
+
+ Rango: {timeRangeSeconds}s
+ Desde: {formatCentralTimeInfo().start}
+ Hasta: {formatCentralTimeInfo().end}
+
+
+
+ )}
+
{error && (
@@ -387,24 +497,24 @@ export default function PlotHistoricalSession({
)}
{showDataPreview && dataStats.totalPoints > 0 && (
-
- π Data Summary
+
+ π Data Summary
- Total Points:
- {dataStats.totalPoints.toLocaleString()}
+ Total Points:
+ {dataStats.totalPoints.toLocaleString()}
- Variables Found:
- {dataStats.variablesFound.join(', ')}
+ Variables Found:
+ {dataStats.variablesFound.join(', ')}
- Load Time:
- {dataStats.loadTime}ms
+ Load Time:
+ {dataStats.loadTime}ms
- Time Range:
- {formatTimeRange(dataStats.timeRange)}
+ Time Range:
+ {formatTimeRange(dataStats.timeRange)}
@@ -415,7 +525,7 @@ export default function PlotHistoricalSession({
-
+
Loading historical data...
-
+
{loadingProgress}% complete
@@ -454,34 +564,32 @@ export default function PlotHistoricalSession({
π
Time Range
- Start Time
+ Central Time
setLocalTimeRange(prev => ({
- ...prev,
- start: new Date(e.target.value)
- }))}
+ onChange={(e) => setCentralTime(new Date(e.target.value))}
size="sm"
/>
- End Time
- setLocalTimeRange(prev => ({
- ...prev,
- end: new Date(e.target.value)
- }))}
+ Range (seconds)
+ setTimeRangeSeconds(parseInt(valueStr) || 1000)}
+ min={60}
+ max={86400}
size="sm"
- />
+ >
+
+
+
+
+
+
diff --git a/frontend/src/components/TimePointSelector.jsx b/frontend/src/components/TimePointSelector.jsx
new file mode 100644
index 0000000..2280d3b
--- /dev/null
+++ b/frontend/src/components/TimePointSelector.jsx
@@ -0,0 +1,203 @@
+import { useMemo, useState, useCallback, useRef, useEffect } from "react";
+import { Box, Flex, Text, Slider, SliderTrack, SliderFilledTrack, SliderThumb, useColorModeValue } from "@chakra-ui/react";
+import DatePicker from "react-datepicker";
+import "react-datepicker/dist/react-datepicker.css";
+
+export default function TimePointSelector({
+ minDate,
+ maxDate,
+ initial,
+ stepMinutes = 5,
+ onTimeChange,
+}) {
+ // Color mode values
+ const bgColor = useColorModeValue('gray.50', 'gray.700');
+ const borderColor = useColorModeValue('gray.200', 'gray.600');
+ const textColor = useColorModeValue('gray.800', 'gray.200');
+
+ // Valores numΓ©ricos en ms para el slider
+ const [minMs, maxMs] = useMemo(() => [minDate.getTime(), maxDate.getTime()], [minDate, maxDate]);
+ const stepMs = useMemo(() => stepMinutes * 60 * 1000, [stepMinutes]);
+
+ // Estado ΓΊnico (Date)
+ const [value, setValue] = useState(() => {
+ // clamp al rango por si initial cae fuera
+ const t = initial.getTime();
+ return new Date(Math.min(Math.max(t, minMs), maxMs));
+ });
+
+ const valueMs = value.getTime();
+
+ // Cooldown para evitar mΓΊltiples solicitudes
+ const cooldownRef = useRef(null);
+ const lastCallbackValueRef = useRef(null);
+
+ // Redondea al paso del slider
+ const snapToStep = useCallback((ms) => {
+ const snapped = Math.round(ms / stepMs) * stepMs;
+ return Math.min(Math.max(snapped, minMs), maxMs);
+ }, [stepMs, minMs, maxMs]);
+
+ // FunciΓ³n con cooldown para llamar al callback
+ const debouncedOnTimeChange = useCallback((newValue) => {
+ // Si ya hay un timer, cancelarlo
+ if (cooldownRef.current) {
+ clearTimeout(cooldownRef.current);
+ }
+
+ // Guardar el valor actual para llamar al callback despuΓ©s del cooldown
+ lastCallbackValueRef.current = newValue;
+
+ // Establecer nuevo timer
+ cooldownRef.current = setTimeout(() => {
+ if (onTimeChange && lastCallbackValueRef.current) {
+ console.log('π TimeSelector: Calling onChange after cooldown', lastCallbackValueRef.current);
+ onTimeChange(lastCallbackValueRef.current);
+ }
+ cooldownRef.current = null;
+ }, 1000); // 1 segundo de cooldown
+ }, [onTimeChange]);
+
+ // Cleanup del timer al desmontar
+ useEffect(() => {
+ return () => {
+ if (cooldownRef.current) {
+ clearTimeout(cooldownRef.current);
+ }
+ };
+ }, []);
+
+ // Cambio desde el DatePicker (sin cooldown, cambio directo)
+ const onPick = (d) => {
+ if (!d) return;
+ const newValue = new Date(snapToStep(d.getTime()));
+ setValue(newValue);
+
+ // DatePicker no necesita cooldown, es cambio directo
+ if (onTimeChange) {
+ console.log('π TimeSelector: DatePicker change (immediate)', newValue);
+ onTimeChange(newValue);
+ }
+ };
+
+ // Cambio desde el Slider (con cooldown)
+ const onSlide = (ms) => {
+ const newValue = new Date(ms);
+ setValue(newValue);
+
+ // Usar cooldown para el slider
+ debouncedOnTimeChange(newValue);
+ };
+
+ return (
+
+
+
+ Seleccionar fecha y hora
+
+
+
+
+
+
+ Navegar con slider
+
+
+
+
+
+
+
+ {value.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ })}
+
+
+
+
+
+ Rango: {minDate.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ })} β {maxDate.toLocaleString('es-ES', {
+ day: '2-digit',
+ month: '2-digit',
+ year: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ hour12: false
+ })} | Paso: {stepMinutes} min
+
+
+ );
+}
diff --git a/frontend/src/services/api.js b/frontend/src/services/api.js
index c609345..2eb56b2 100644
--- a/frontend/src/services/api.js
+++ b/frontend/src/services/api.js
@@ -229,6 +229,14 @@ export async function getHistoricalData(requestData) {
return toJsonOrThrow(res)
}
+// Get available date range for historical data
+export async function getHistoricalDateRange() {
+ const res = await fetch(`${BASE_URL}/api/plots/historical/date-range`, {
+ headers: { 'Accept': 'application/json' }
+ })
+ return toJsonOrThrow(res)
+}
+
// Plot session status and control (aliases for existing functions)
export async function getPlotSession(sessionId) {
// Use existing getPlotConfig to get session info
diff --git a/main.py b/main.py
index e00f299..efcc629 100644
--- a/main.py
+++ b/main.py
@@ -8,9 +8,15 @@ from flask import (
from flask_cors import CORS
import json
import time
-from datetime import datetime
+from datetime import datetime, timedelta, timezone
import os
import sys
+import logging
+
+# Configure logging to show only errors for cleaner historical data logs
+logging.basicConfig(level=logging.ERROR)
+# Reduce Flask's request logging to ERROR level only
+logging.getLogger('werkzeug').setLevel(logging.ERROR)
try:
import tkinter as tk
@@ -1512,7 +1518,7 @@ def health_check():
@app.route("/api/plots", methods=["GET"])
def get_plots():
"""Get all plot sessions status"""
- print("π DEBUG: /api/plots endpoint called")
+ # print("π DEBUG: /api/plots endpoint called")
error_response = check_streamer_initialized()
if error_response:
@@ -1522,11 +1528,11 @@ def get_plots():
print("β
DEBUG: Streamer is initialized")
try:
- print("π DEBUG: Accessing streamer.data_streamer.plot_manager...")
+ # print("π DEBUG: Accessing streamer.data_streamer.plot_manager...")
plot_manager = streamer.data_streamer.plot_manager
print(f"β
DEBUG: Plot manager obtained: {type(plot_manager)}")
- print("π DEBUG: Calling get_all_sessions_status()...")
+ # print("π DEBUG: Calling get_all_sessions_status()...")
sessions = plot_manager.get_all_sessions_status()
print(f"β
DEBUG: Sessions obtained: {len(sessions)} sessions")
print(f"π DEBUG: Sessions data: {sessions}")
@@ -1879,11 +1885,10 @@ def get_historical_data():
# Import required modules
try:
- print("π DEBUG: Importing modules...")
+ # print("π DEBUG: Importing modules...")
import pandas as pd
import glob
- from datetime import timedelta
- print("π DEBUG: All imports successful")
+ # print("π DEBUG: All imports successful")
except ImportError as e:
print(f"β DEBUG: Import failed: {e}")
return jsonify({"error": f"pandas import failed: {str(e)}"}), 500
@@ -1898,17 +1903,21 @@ def get_historical_data():
# Calculate time range
try:
- print("π DEBUG: Calculating time range...")
+ # print("π DEBUG: Calculating time range...")
if start_time_param and end_time_param:
- start_time = datetime.fromisoformat(start_time_param.replace('Z', '+00:00'))
- end_time = datetime.fromisoformat(end_time_param.replace('Z', '+00:00'))
- # Convert to local timezone if needed
- if start_time.tzinfo:
- start_time = start_time.replace(tzinfo=None)
- if end_time.tzinfo:
- end_time = end_time.replace(tzinfo=None)
- print(f"π DEBUG: Using explicit time range: {start_time} to {end_time}")
+ # Parse timestamps from frontend (UTC) and convert to local time
+
+ # Parse as UTC timestamps (frontend sends them with 'Z')
+ start_time_utc = datetime.fromisoformat(start_time_param.replace('Z', '+00:00'))
+ end_time_utc = datetime.fromisoformat(end_time_param.replace('Z', '+00:00'))
+
+ # Convert to local time (remove timezone info since CSV data has no timezone)
+ start_time = start_time_utc.astimezone().replace(tzinfo=None)
+ end_time = end_time_utc.astimezone().replace(tzinfo=None)
+
+ print(f"π DEBUG: UTC timestamps: {start_time_utc} to {end_time_utc}")
+ print(f"π DEBUG: Local timestamps: {start_time} to {end_time}")
# Validate time range
if start_time >= end_time:
@@ -1947,10 +1956,11 @@ def get_historical_data():
csv_files.extend(glob.glob(os.path.join(folder_path, "*.csv")))
current_date += timedelta(days=1)
- print(f"π DEBUG: Found {len(csv_files)} CSV files for cache checking")
+ # print(f"π DEBUG: Found {len(csv_files)} CSV files for cache checking")
# Try to get data from cache first
- cached_data = historical_cache.get_cached_data(variables, start_time, end_time, csv_files)
+ # TEMPORARY: Disable cache to debug data display issues
+ cached_data = None # historical_cache.get_cached_data(variables, start_time, end_time, csv_files)
if cached_data is not None:
print("οΏ½ DEBUG: Cache hit! Returning cached data")
@@ -2010,10 +2020,12 @@ def get_historical_data():
date_folders.append(folder_path)
current_date += timedelta(days=1)
- print(f"π DEBUG: Processing {len(date_folders)} date folders with buffer")
+ # print(f"π DEBUG: Processing {len(date_folders)} date folders with buffer")
# Process CSV files and collect all data (including buffer)
- all_data_for_cache = []
+ # Use a dictionary to collect data by timestamp, then fill missing values with NaN
+ timestamp_data = {} # {timestamp: {variable: value}}
+ all_timestamps = set()
for folder_path in date_folders:
csv_files_in_folder = glob.glob(os.path.join(folder_path, "*.csv"))
@@ -2046,13 +2058,13 @@ def get_historical_data():
continue
# Convert timestamps
- df[timestamp_col] = pd.to_datetime(df[timestamp_col], errors="coerce")
+ df[timestamp_col] = pd.to_datetime(df[timestamp_col], errors='coerce')
df = df.dropna(subset=[timestamp_col])
if df.empty:
continue
- # Normalize column name
+ # Rename timestamp column for consistency
if timestamp_col != "timestamp":
df = df.rename(columns={timestamp_col: "timestamp"})
@@ -2069,42 +2081,63 @@ def get_historical_data():
if not matching_vars:
continue
- # Extract data for cache
+ # print(f"🔍 DEBUG: File {csv_file} - Found {len(matching_vars)} matching variables: {matching_vars}")
+
+ # Extract data for each timestamp
for _, row in filtered_df.iterrows():
timestamp = row["timestamp"]
+ all_timestamps.add(timestamp)
+
+ if timestamp not in timestamp_data:
+ timestamp_data[timestamp] = {}
+
+ # Store data for available variables, others will be NaN
for var in matching_vars:
- if var in row and pd.notna(row[var]):
- try:
- value = row[var]
- # Type conversion
- if isinstance(value, str):
- value_lower = value.lower().strip()
- if value_lower == "true":
- value = True
- elif value_lower == "false":
- value = False
+ if var in row:
+ raw_value = row[var]
+ if pd.notna(raw_value):
+ try:
+ # Type conversion
+ if isinstance(raw_value, str):
+ value_lower = raw_value.lower().strip()
+ if value_lower == "true":
+ value = True
+ elif value_lower == "false":
+ value = False
+ else:
+ try:
+ value = float(raw_value)
+ except ValueError:
+ value = None
+ elif isinstance(raw_value, (int, float)):
+ value = float(raw_value)
else:
- try:
- value = float(value)
- except ValueError:
- continue
- elif isinstance(value, (int, float)):
- value = float(value)
- else:
- continue
-
- all_data_for_cache.append({
- "timestamp": timestamp,
- "variable": var,
- "value": value,
- })
- except:
- continue
+ value = None
+
+ timestamp_data[timestamp][var] = value
+ except:
+ timestamp_data[timestamp][var] = None
+ else:
+ timestamp_data[timestamp][var] = None
except Exception as e:
print(f"Warning: Could not read CSV file {csv_file}: {e}")
continue
+ # Convert to list format for DataFrame creation
+ all_data_for_cache = []
+ for timestamp in sorted(all_timestamps):
+ for var in variables:
+ # Get value or None if not available
+ value = timestamp_data[timestamp].get(var, None)
+ all_data_for_cache.append({
+ "timestamp": timestamp,
+ "variable": var,
+ "value": value,
+ })
+
+ print(f"🔍 DEBUG: Collected {len(all_data_for_cache)} total data points from {len(all_timestamps)} timestamps for {len(variables)} variables")
+
# Convert to DataFrame for caching
if all_data_for_cache:
cache_df = pd.DataFrame(all_data_for_cache)
@@ -2118,11 +2151,18 @@ def get_historical_data():
response_df = cache_df[response_mask]
# Convert to response format
+ historical_data = []
for _, row in response_df.iterrows():
+ # Include all data points, even with None values (converted to null in JSON)
+ value = row["value"]
+ # Convert pandas NaN to None for proper JSON serialization
+ if pd.isna(value):
+ value = None
+
historical_data.append({
"timestamp": row["timestamp"].isoformat(),
"variable": row["variable"],
- "value": row["value"]
+ "value": value
})
print(f"π DEBUG: Loaded {len(historical_data)} data points for response")
@@ -2188,6 +2228,132 @@ def clear_cache():
return jsonify({"error": str(e)}), 500
+@app.route("/api/plots/historical/date-range", methods=["GET"])
+def get_historical_date_range():
+ """Get the available date range from CSV files"""
+ try:
+ import pandas as pd
+ import glob
+
+ # Get records directory
+ records_dir = os.path.join(os.path.dirname(__file__), "records")
+
+ if not os.path.exists(records_dir):
+ return jsonify({
+ "success": False,
+ "error": "No records directory found"
+ }), 404
+
+ # Find all date folders (format: DD-MM-YYYY)
+ date_folders = []
+ for item in os.listdir(records_dir):
+ folder_path = os.path.join(records_dir, item)
+ if os.path.isdir(folder_path):
+ try:
+ # Try to parse the folder name as a date
+ date_obj = datetime.strptime(item, "%d-%m-%Y")
+ date_folders.append((date_obj, folder_path))
+ except ValueError:
+ continue
+
+ if not date_folders:
+ return jsonify({
+ "success": False,
+ "error": "No valid date folders found"
+ }), 404
+
+ # Sort by date
+ date_folders.sort(key=lambda x: x[0])
+
+ # Get the earliest and latest dates
+ earliest_date = date_folders[0][0]
+ latest_date = date_folders[-1][0]
+
+ # For more precise range, check actual CSV file timestamps
+ min_timestamp = None
+ max_timestamp = None
+
+ # Check files more thoroughly to get precise timestamp range
+ for date_obj, folder_path in date_folders:
+ csv_files = glob.glob(os.path.join(folder_path, "*.csv"))
+
+ for csv_file in csv_files:
+ try:
+ # Try to read timestamp range from CSV with better sampling
+ # Read first and last few rows to get min/max more accurately
+ df_head = pd.read_csv(csv_file, nrows=100, encoding='utf-8-sig')
+ df_tail = pd.read_csv(csv_file, encoding='utf-8-sig').tail(100)
+
+ # Combine head and tail for better range detection
+ df_sample = pd.concat([df_head, df_tail]).drop_duplicates()
+
+ # Find timestamp column
+ timestamp_col = None
+ for col in df_sample.columns:
+ if 'timestamp' in col.lower():
+ timestamp_col = col
+ break
+
+ if timestamp_col:
+ # Convert timestamp with multiple format attempts
+ df_sample[timestamp_col] = pd.to_datetime(df_sample[timestamp_col],
+ errors='coerce')
+ df_sample = df_sample.dropna(subset=[timestamp_col])
+
+ if not df_sample.empty:
+ file_min = df_sample[timestamp_col].min()
+ file_max = df_sample[timestamp_col].max()
+
+ # Convert to timezone-naive datetime if needed
+ if hasattr(file_min, 'tz_localize') and file_min.tz is not None:
+ file_min = file_min.tz_localize(None)
+ if hasattr(file_max, 'tz_localize') and file_max.tz is not None:
+ file_max = file_max.tz_localize(None)
+
+ if min_timestamp is None or file_min < min_timestamp:
+ min_timestamp = file_min
+ if max_timestamp is None or file_max > max_timestamp:
+ max_timestamp = file_max
+
+ # print(f"🔍 DEBUG: File {csv_file} - Range: {file_min} to {file_max}")
+
+ except Exception as e:
+ # print(f"🔍 DEBUG: Error reading {csv_file}: {e}")
+ continue
+
+ # Use folder dates as fallback if we couldn't read CSV timestamps
+ if min_timestamp is None:
+ min_timestamp = earliest_date
+ print(f"🔍 DEBUG: Using earliest folder date as min: {earliest_date}")
+ if max_timestamp is None:
+ max_timestamp = latest_date + timedelta(days=1)
+ print(f"🔍 DEBUG: Using latest folder date as max: {latest_date}")
+
+ print(f"🔍 DEBUG: Final timestamp range: {min_timestamp} to {max_timestamp}")
+
+ return jsonify({
+ "success": True,
+ "date_range": {
+ "min_date": min_timestamp.isoformat(),
+ "max_date": max_timestamp.isoformat(),
+ "folders_count": len(date_folders),
+ "earliest_folder": earliest_date.strftime("%d-%m-%Y"),
+ "latest_folder": latest_date.strftime("%d-%m-%Y")
+ }
+ })
+
+ except ImportError:
+ return jsonify({
+ "success": False,
+ "error": "pandas is required for date range calculation"
+ }), 500
+ except Exception as e:
+ return jsonify({
+ "success": False,
+ "error": str(e)
+ }), 500
+
+
@app.route("/api/plots/sessions/", methods=["GET"])
def get_plot_sessions(plot_id):
"""Get all session IDs for a specific plot ID"""
diff --git a/system_state.json b/system_state.json
index 80c7bcd..5747cfd 100644
--- a/system_state.json
+++ b/system_state.json
@@ -1,10 +1,14 @@
{
"last_state": {
- "should_connect": false,
+ "should_connect": true,
"should_stream": false,
- "active_datasets": []
+ "active_datasets": [
+ "Test",
+ "Fast",
+ "DAR"
+ ]
},
"auto_recovery_enabled": true,
- "last_update": "2025-08-15T20:58:45.862859",
+ "last_update": "2025-08-16T12:35:20.372281",
"plotjuggler_path": "C:\\Program Files\\PlotJuggler\\plotjuggler.exe"
}
\ No newline at end of file