feat: Enhance application event logging, add CSV header validation, and implement fullscreen mode for charts

This commit is contained in:
Miguel 2025-08-14 22:57:42 +02:00
parent 032959f491
commit ea2006666f
8 changed files with 554 additions and 258 deletions

View File

@ -2113,8 +2113,214 @@
"active_datasets_count": 3,
"csv_recording_active": true
}
},
{
"timestamp": "2025-08-14T22:40:58.407368",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-14T22:40:58.488687",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: DAR",
"details": {
"dataset_id": "DAR",
"variables_count": 2,
"streaming_count": 2,
"prefix": "gateway_phoenix"
}
},
{
"timestamp": "2025-08-14T22:40:58.493692",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: Fast",
"details": {
"dataset_id": "Fast",
"variables_count": 2,
"streaming_count": 1,
"prefix": "fast"
}
},
{
"timestamp": "2025-08-14T22:40:58.500690",
"level": "info",
"event_type": "csv_recording_started",
"message": "CSV recording started: 2 datasets activated",
"details": {
"activated_datasets": 2,
"total_datasets": 3
}
},
{
"timestamp": "2025-08-14T22:40:58.508089",
"level": "info",
"event_type": "udp_streaming_started",
"message": "UDP streaming to PlotJuggler started",
"details": {
"udp_host": "127.0.0.1",
"udp_port": 9870,
"datasets_available": 3
}
},
{
"timestamp": "2025-08-14T22:41:08.736742",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-14T22:42:07.195383",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-14T22:42:09.975760",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-14T22:51:29.299710",
"level": "info",
"event_type": "application_started",
"message": "Application initialization completed successfully",
"details": {}
},
{
"timestamp": "2025-08-14T22:51:29.367770",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: DAR",
"details": {
"dataset_id": "DAR",
"variables_count": 2,
"streaming_count": 2,
"prefix": "gateway_phoenix"
}
},
{
"timestamp": "2025-08-14T22:51:29.372770",
"level": "info",
"event_type": "dataset_activated",
"message": "Dataset activated: Fast",
"details": {
"dataset_id": "Fast",
"variables_count": 2,
"streaming_count": 1,
"prefix": "fast"
}
},
{
"timestamp": "2025-08-14T22:51:29.375778",
"level": "info",
"event_type": "csv_recording_started",
"message": "CSV recording started: 2 datasets activated",
"details": {
"activated_datasets": 2,
"total_datasets": 3
}
},
{
"timestamp": "2025-08-14T22:51:29.380769",
"level": "info",
"event_type": "udp_streaming_started",
"message": "UDP streaming to PlotJuggler started",
"details": {
"udp_host": "127.0.0.1",
"udp_port": 9870,
"datasets_available": 3
}
},
{
"timestamp": "2025-08-14T22:52:10.248646",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-14T22:55:17.545033",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
},
{
"timestamp": "2025-08-14T22:57:05.817266",
"level": "info",
"event_type": "plot_session_created",
"message": "Plot session 'UR29' created and started",
"details": {
"session_id": "plot_1",
"variables": [
"UR29_Brix",
"UR29_ma",
"AUX Blink_1.0S"
],
"time_window": 20,
"trigger_variable": null,
"auto_started": true
}
}
],
"last_updated": "2025-08-14T22:33:16.680664",
"total_entries": 207
"last_updated": "2025-08-14T22:57:05.817266",
"total_entries": 223
}

View File

@ -23,7 +23,7 @@
"variable_name": "AUX Blink_1.0S",
"color": "#3498db",
"line_width": 2,
"y_axis": "left",
"y_axis": "right",
"enabled": true
}
]

View File

@ -206,10 +206,10 @@ class PLCDataStreamer:
try:
self.config_manager.load_datasets()
self.config_manager.sync_streaming_variables()
# 🔍 NEW: Validate CSV headers for active datasets after configuration reload
self._validate_csv_headers_after_config_change()
self.event_logger.log_event(
"info",
"config_reload",
@ -218,16 +218,18 @@ class PLCDataStreamer:
"datasets_count": len(self.config_manager.datasets),
"active_datasets_count": len(self.config_manager.active_datasets),
"csv_recording_active": self.data_streamer.is_csv_recording(),
}
},
)
if self.logger:
self.logger.info("Dataset configuration reloaded successfully with CSV header validation")
self.logger.info(
"Dataset configuration reloaded successfully with CSV header validation"
)
except Exception as e:
self.event_logger.log_event(
"error",
"config_reload_failed",
f"Failed to reload dataset configuration: {str(e)}",
{"error": str(e)}
{"error": str(e)},
)
if self.logger:
self.logger.error(f"Failed to reload dataset configuration: {e}")
@ -237,7 +239,9 @@ class PLCDataStreamer:
"""Validate CSV headers for all active datasets after configuration changes"""
if not self.data_streamer.is_csv_recording():
if self.logger:
self.logger.debug("CSV recording not active, skipping header validation")
self.logger.debug(
"CSV recording not active, skipping header validation"
)
return
validated_datasets = []
@ -251,19 +255,23 @@ class PLCDataStreamer:
# Get current CSV file path
csv_path = self.data_streamer.get_dataset_csv_file_path(dataset_id)
if not os.path.exists(csv_path):
continue
# Get expected headers based on current configuration
dataset_variables = self.config_manager.get_dataset_variables(dataset_id)
dataset_variables = self.config_manager.get_dataset_variables(
dataset_id
)
expected_headers = ["timestamp"] + list(dataset_variables.keys())
# Read existing headers from the file
existing_headers = self.data_streamer.read_csv_headers(csv_path)
# Compare headers
if existing_headers and not self.data_streamer.compare_headers(existing_headers, expected_headers):
if existing_headers and not self.data_streamer.compare_headers(
existing_headers, expected_headers
):
# Header mismatch detected - close current file and rename it
if dataset_id in self.data_streamer.dataset_csv_files:
self.data_streamer.dataset_csv_files[dataset_id].close()
@ -272,37 +280,51 @@ class PLCDataStreamer:
# Rename the file with timestamp
prefix = self.config_manager.datasets[dataset_id]["prefix"]
renamed_path = self.data_streamer.rename_csv_file_with_timestamp(csv_path, prefix)
header_mismatches.append({
"dataset_id": dataset_id,
"dataset_name": self.config_manager.datasets[dataset_id]["name"],
"original_file": csv_path,
"renamed_file": renamed_path,
"expected_headers": expected_headers,
"existing_headers": existing_headers
})
renamed_path = self.data_streamer.rename_csv_file_with_timestamp(
csv_path, prefix
)
header_mismatches.append(
{
"dataset_id": dataset_id,
"dataset_name": self.config_manager.datasets[dataset_id][
"name"
],
"original_file": csv_path,
"renamed_file": renamed_path,
"expected_headers": expected_headers,
"existing_headers": existing_headers,
}
)
# Create new file with correct headers (will be done on next write)
# The setup_dataset_csv_file method will handle creating the new file
if self.logger:
self.logger.info(
f"CSV header mismatch detected for dataset '{self.config_manager.datasets[dataset_id]['name']}' "
f"after configuration reload. File renamed: {os.path.basename(csv_path)} -> {os.path.basename(renamed_path)}"
)
validated_datasets.append({
"dataset_id": dataset_id,
"dataset_name": self.config_manager.datasets[dataset_id]["name"],
"headers_match": len(header_mismatches) == 0 or dataset_id not in [h["dataset_id"] for h in header_mismatches],
"expected_headers": expected_headers,
"existing_headers": existing_headers
})
validated_datasets.append(
{
"dataset_id": dataset_id,
"dataset_name": self.config_manager.datasets[dataset_id][
"name"
],
"headers_match": len(header_mismatches) == 0
or dataset_id
not in [h["dataset_id"] for h in header_mismatches],
"expected_headers": expected_headers,
"existing_headers": existing_headers,
}
)
except Exception as e:
if self.logger:
self.logger.warning(f"Error validating CSV headers for dataset {dataset_id}: {e}")
self.logger.warning(
f"Error validating CSV headers for dataset {dataset_id}: {e}"
)
# Log summary of validation results
if header_mismatches:
@ -313,12 +335,14 @@ class PLCDataStreamer:
{
"mismatched_datasets": len(header_mismatches),
"total_validated": len(validated_datasets),
"details": header_mismatches
}
"details": header_mismatches,
},
)
else:
if validated_datasets and self.logger:
self.logger.info(f"CSV headers validated for {len(validated_datasets)} active datasets - all headers match")
self.logger.info(
f"CSV headers validated for {len(validated_datasets)} active datasets - all headers match"
)
# Configuration Methods
def update_plc_config(self, ip: str, rack: int, slot: int):

View File

@ -255,9 +255,18 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
},
...(zoomAvailable ? {
zoom: {
// Evita listeners wheel/touch no-passive del plugin; usa drag + pan con modificador
pan: { enabled: true, mode: 'x', modifierKey: 'shift' },
zoom: { drag: { enabled: true }, wheel: { enabled: false }, pinch: { enabled: false }, mode: 'x' }
// Solo habilitar zoom/pan en modo fullscreen
pan: {
enabled: !!session?.isFullscreen,
mode: 'x',
modifierKey: 'shift'
},
zoom: {
drag: { enabled: !!session?.isFullscreen },
wheel: { enabled: !!session?.isFullscreen },
pinch: { enabled: !!session?.isFullscreen },
mode: 'x'
}
}
} : {})
},
@ -523,6 +532,26 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
setDataPointsCount(0);
}, []);
const resetZoom = useCallback(() => {
if (!chartRef.current) return;
try {
// Try to reset zoom using the zoom plugin
if (chartRef.current.resetZoom) {
chartRef.current.resetZoom();
} else if (window.Chart?.helpers?.getRelativePosition) {
// Fallback: manually reset zoom by updating scale options
const chart = chartRef.current;
if (chart.options?.scales?.x?.realtime) {
// For realtime charts, just trigger an update
chart.update('none');
}
}
} catch (error) {
console.warn('Failed to reset zoom:', error);
}
}, []);
// Update configuration directly (for real-time style changes)
const updateConfig = useCallback(async (newConfig) => {
try {
@ -615,11 +644,12 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
pauseStreaming,
resumeStreaming,
clearChart,
resetZoom,
refreshConfiguration,
updateConfig
});
}
}, [pauseStreaming, resumeStreaming, clearChart, refreshConfiguration, updateConfig, session?.session_id, session?.onChartReady]);
}, [pauseStreaming, resumeStreaming, clearChart, resetZoom, refreshConfiguration, updateConfig, session?.session_id, session?.onChartReady]);
// Update chart when session status changes
useEffect(() => {
@ -637,6 +667,14 @@ const ChartjsPlot = ({ session, height = '400px' }) => {
}
}, [session?.is_active, session?.is_paused, pauseStreaming, resumeStreaming]);
// Recreate chart when fullscreen mode changes to enable/disable zoom
useEffect(() => {
if (chartRef.current && typeof session?.isFullscreen === 'boolean') {
console.log(`🔄 Fullscreen mode changed to ${session.isFullscreen}, recreating chart...`);
createStreamingChart();
}
}, [session?.isFullscreen, createStreamingChart]);
// Initialize chart when config is resolved - simplified approach
useEffect(() => {
// Only create chart once when we have a session_id and canvas

View File

@ -30,9 +30,16 @@ import {
Slider,
SliderTrack,
SliderFilledTrack,
SliderThumb
SliderThumb,
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalBody,
useDisclosure,
} from '@chakra-ui/react'
import { SettingsIcon, RepeatIcon } from '@chakra-ui/icons'
import { SettingsIcon, RepeatIcon, ViewIcon } from '@chakra-ui/icons'
import ChartjsPlot from './ChartjsPlot.jsx'
import * as api from '../services/api'
@ -57,6 +64,7 @@ export default function PlotRealtimeSession({
const [showSettings, setShowSettings] = useState(false)
const [isRefreshing, setIsRefreshing] = useState(false)
const { isOpen: isFullscreen, onOpen: openFullscreen, onClose: closeFullscreen } = useDisclosure()
const [localConfig, setLocalConfig] = useState({
time_window: plotDefinition.time_window || 60,
y_min: plotDefinition.y_min,
@ -78,6 +86,27 @@ export default function PlotRealtimeSession({
// Track if we're in the middle of applying changes to avoid conflicts
const applyingChangesRef = useRef(false)
// Handle fullscreen resize - force chart resize when modal opens/closes
useEffect(() => {
if (isFullscreen && chartControlsRef.current) {
// Delay to ensure modal is fully rendered
const timer = setTimeout(() => {
if (chartControlsRef.current?.refreshConfiguration) {
chartControlsRef.current.refreshConfiguration()
}
// Also try to trigger a window resize event to force Chart.js to recalculate
window.dispatchEvent(new Event('resize'))
}, 200)
return () => clearTimeout(timer)
} else if (!isFullscreen && chartControlsRef.current) {
// When exiting fullscreen, also trigger resize
const timer = setTimeout(() => {
window.dispatchEvent(new Event('resize'))
}, 100)
return () => clearTimeout(timer)
}
}, [isFullscreen])
// Update localConfig when plotDefinition changes (but not during our own updates)
useEffect(() => {
if (!applyingChangesRef.current) {
@ -108,6 +137,7 @@ export default function PlotRealtimeSession({
is_active: session.is_active,
is_paused: session.is_paused,
variables_count: plotVariables.length,
isFullscreen: isFullscreen,
config: {
...plotDefinition,
...localConfig,
@ -123,7 +153,8 @@ export default function PlotRealtimeSession({
session.is_active,
session.is_paused,
plotVariables,
localConfig
localConfig,
isFullscreen
])
// Load session status from backend (optional - session may not exist until started)
@ -430,6 +461,15 @@ export default function PlotRealtimeSession({
</Box>
<Spacer />
<HStack>
<Button
size="sm"
variant="outline"
onClick={openFullscreen}
colorScheme="blue"
leftIcon={<ViewIcon />}
>
Fullscreen
</Button>
<IconButton
icon={<RepeatIcon />}
size="sm"
@ -695,8 +735,82 @@ export default function PlotRealtimeSession({
>
Stop
</Button>
<Spacer />
<Button
size="sm"
onClick={openFullscreen}
colorScheme="blue"
variant="solid"
leftIcon={<ViewIcon />}
>
Fullscreen
</Button>
</HStack>
</CardBody>
{/* Fullscreen Modal */}
<Modal isOpen={isFullscreen} onClose={closeFullscreen} size="full">
<ModalOverlay bg="blackAlpha.800" />
<ModalContent bg={cardBg} m={0} borderRadius={0} h="100vh">
<ModalHeader>
<HStack>
<Text>📈 {plotDefinition.name} - Fullscreen Mode</Text>
<Spacer />
<Text fontSize="sm" color={muted}>
Zoom: Drag to select area | Pan: Shift + Drag | Double-click to reset
</Text>
</HStack>
</ModalHeader>
<ModalCloseButton size="lg" />
<ModalBody p={4} h="calc(100vh - 80px)" display="flex" flexDirection="column">
<Box flex="1" w="100%" minH={0}>
<ChartjsPlot session={enhancedSession} height="100%" />
</Box>
<HStack spacing={2} mt={4} justify="center">
<Button
size="sm"
onClick={() => handleControlClick('start')}
colorScheme="green"
isDisabled={session.is_active && !session.is_paused}
>
Start
</Button>
<Button
size="sm"
onClick={() => handleControlClick('pause')}
colorScheme="yellow"
isDisabled={!session.is_active || session.is_paused}
>
Pause
</Button>
<Button
size="sm"
onClick={() => handleControlClick('clear')}
variant="outline"
>
🗑 Clear
</Button>
<Button
size="sm"
onClick={() => handleControlClick('stop')}
colorScheme="red"
isDisabled={!session.is_active}
>
Stop
</Button>
{chartControlsRef.current && (
<Button
size="sm"
onClick={() => chartControlsRef.current.resetZoom?.()}
variant="outline"
>
🔄 Reset Zoom
</Button>
)}
</HStack>
</ModalBody>
</ModalContent>
</Modal>
</Card>
)
}

View File

@ -1,215 +0,0 @@
import React, { useEffect, useMemo, useRef, useState } from 'react'
import {
Box,
VStack,
HStack,
Text,
Button,
Card,
CardBody,
CardHeader,
Heading,
useColorModeValue,
Badge,
IconButton,
Divider,
Spacer,
} from '@chakra-ui/react'
import { EditIcon, SettingsIcon, DeleteIcon } from '@chakra-ui/icons'
import ChartjsPlot from './ChartjsPlot.jsx'
/**
 * Top-level viewer that polls the backend for active plot sessions and renders
 * one PlotRealtimeCard per session.
 *
 * State is kept in a Map keyed by session_id. Existing session objects are
 * mutated in place (rather than replaced) so their references stay stable
 * across polls — this keeps child components from remounting their charts.
 */
export default function PlotRealtimeViewer() {
  const [sessions, setSessions] = useState(new Map())
  const [loading, setLoading] = useState(false)
  // Handle of the 5 s polling interval, cleared on unmount.
  const intervalRef = useRef(null)
  const muted = useColorModeValue('gray.600', 'gray.300')
  // Fetch the full session list and reconcile it into the sessions Map.
  const loadSessions = async () => {
    try {
      setLoading(true)
      const res = await fetch('/api/plots')
      const data = await res.json()
      if (data && data.sessions) {
        setSessions(prev => {
          const next = new Map(prev)
          const incomingIds = new Set()
          for (const s of data.sessions) {
            incomingIds.add(s.session_id)
            const existing = next.get(s.session_id)
            if (existing) {
              // Mutate existing object to preserve reference
              existing.name = s.name
              existing.is_active = s.is_active
              existing.is_paused = s.is_paused
              existing.variables_count = s.variables_count
            } else {
              next.set(s.session_id, { ...s })
            }
          }
          // Remove sessions not present anymore
          for (const id of Array.from(next.keys())) {
            if (!incomingIds.has(id)) next.delete(id)
          }
          return next
        })
      } else {
        setSessions(new Map())
      }
    } catch {
      // Network/parse failure: treat as "no sessions" rather than crashing.
      setSessions(new Map())
    } finally {
      setLoading(false)
    }
  }
  // Refresh a single session's metadata from its /config endpoint.
  const refreshSession = async (sessionId) => {
    try {
      const res = await fetch(`/api/plots/${sessionId}/config`)
      const data = await res.json()
      if (data && data.success && data.config) {
        setSessions(prev => {
          const n = new Map(prev)
          const existing = n.get(sessionId)
          // variables may arrive as an array or an object; fall back to the
          // previously known count when absent.
          const varsCount = Array.isArray(data.config.variables)
            ? data.config.variables.length
            : (data.config.variables ? Object.keys(data.config.variables).length : (existing?.variables_count || 0))
          if (existing) {
            // Same in-place mutation strategy as loadSessions (stable refs).
            existing.name = data.config.name
            existing.is_active = data.config.is_active
            existing.is_paused = data.config.is_paused
            existing.variables_count = varsCount
          } else {
            n.set(sessionId, {
              session_id: sessionId,
              name: data.config.name,
              is_active: data.config.is_active,
              is_paused: data.config.is_paused,
              variables_count: varsCount,
            })
          }
          return n
        })
      }
    } catch { /* ignore */ }
  }
  // Send a control action (start/pause/clear/stop) then re-sync that session.
  const controlSession = async (sessionId, action) => {
    try {
      await fetch(`/api/plots/${sessionId}/control`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ action }),
      })
      await refreshSession(sessionId)
    } catch { /* ignore */ }
  }
  // Poll the session list every 5 seconds while mounted.
  useEffect(() => {
    loadSessions()
    intervalRef.current = setInterval(loadSessions, 5000)
    return () => { if (intervalRef.current) clearInterval(intervalRef.current) }
  }, [])
  const sessionsList = useMemo(() => Array.from(sessions.values()), [sessions])
  if (loading && sessionsList.length === 0) {
    return <Text color={muted}>Cargando sesiones de plots</Text>
  }
  if (sessionsList.length === 0) {
    return (
      <Card>
        <CardBody>
          <Text color={muted}>No hay sesiones de plot. Cree o edite plots en la sección superior.</Text>
        </CardBody>
      </Card>
    )
  }
  return (
    <VStack spacing={4} align="stretch">
      {sessionsList.map((session) => (
        <PlotRealtimeCard
          key={session.session_id}
          session={session}
          onControl={controlSession}
          onRefresh={refreshSession}
        />
      ))}
    </VStack>
  )
}
/**
 * Card for a single realtime plot session: header, ChartjsPlot, and
 * Start/Pause/Clear/Stop controls.
 *
 * Control clicks are applied optimistically to the local chart (via the
 * imperative controls ChartjsPlot hands back through onChartReady) before the
 * backend is notified through onControl.
 */
function PlotRealtimeCard({ session, onControl, onRefresh }) {
  const cardBg = useColorModeValue('white', 'gray.700')
  const borderColor = useColorModeValue('gray.200', 'gray.600')
  const muted = useColorModeValue('gray.600', 'gray.300')
  // Imperative chart API (pauseStreaming/resumeStreaming/clearChart) supplied
  // by ChartjsPlot once its chart instance is ready.
  const chartControlsRef = useRef(null)
  const handleChartReady = (controls) => {
    chartControlsRef.current = controls
  }
  // Session object augmented with the callback ChartjsPlot expects.
  const enhancedSession = {
    ...session,
    onChartReady: handleChartReady,
  }
  const handleControlClick = async (action) => {
    if (chartControlsRef.current) {
      switch (action) {
        case 'pause':
          chartControlsRef.current.pauseStreaming()
          break
        case 'start':
        case 'resume':
          chartControlsRef.current.resumeStreaming()
          break
        case 'clear':
          chartControlsRef.current.clearChart()
          break
        case 'stop':
          // Locally "stop" just pauses the stream; the backend decides the
          // real session state.
          chartControlsRef.current.pauseStreaming()
          break
      }
    }
    // Do not wait for the backend to respond before applying the local effect
    onControl(session.session_id, action)
  }
  return (
    <Card bg={cardBg} borderColor={borderColor}>
      <CardHeader>
        <FlexHeader session={session} muted={muted} onRefresh={() => onRefresh(session.session_id)} />
      </CardHeader>
      <CardBody>
        <ChartjsPlot session={enhancedSession} height="360px" />
        <HStack mt={3} spacing={2}>
          <Button size="sm" onClick={() => handleControlClick('start')} colorScheme="green"> Start</Button>
          <Button size="sm" onClick={() => handleControlClick('pause')} colorScheme="yellow"> Pause</Button>
          <Button size="sm" onClick={() => handleControlClick('clear')} variant="outline">🗑 Clear</Button>
          <Button size="sm" onClick={() => handleControlClick('stop')} colorScheme="red"> Stop</Button>
        </HStack>
      </CardBody>
    </Card>
  )
}
function FlexHeader({ session, muted, onRefresh }) {
return (
<HStack align="center">
<Box>
<Heading size="sm">📈 {session.name || session.session_id}</Heading>
<Text fontSize="sm" color={muted} mt={1}>
Variables: {session.variables_count || 0} | Status: <strong>{session.is_active ? (session.is_paused ? 'Paused' : 'Active') : 'Stopped'}</strong>
</Text>
</Box>
<Spacer />
<HStack>
<IconButton icon={<SettingsIcon />} size="sm" variant="outline" aria-label="Refresh status" onClick={onRefresh} />
</HStack>
</HStack>
)
}

View File

@ -3,11 +3,11 @@
"should_connect": true,
"should_stream": true,
"active_datasets": [
"DAR",
"Fast",
"DAR",
"Test"
]
},
"auto_recovery_enabled": true,
"last_update": "2025-08-14T22:33:00.768192"
"last_update": "2025-08-14T22:51:29.383787"
}

129
test_config_reload.py Normal file
View File

@ -0,0 +1,129 @@
"""
Test script to validate automatic configuration reloading
"""
import json
import requests
import time
# Configuration
BASE_URL = "http://localhost:5000"
TEST_DATASET_ID = "TestReload"
def _cleanup_test_dataset(datasets):
    """Best-effort removal of the test dataset from the saved configuration.

    Args:
        datasets: The dataset list that was last PUT to the backend (it may
            still contain the entry with id TEST_DATASET_ID).
    """
    print("🧹 Cleaning up test dataset...")
    cleanup_config = {
        "datasets": [d for d in datasets if d.get("id") != TEST_DATASET_ID],
        "version": "1.0",
        "last_update": f"{time.time()}",
    }
    response = requests.put(
        f"{BASE_URL}/api/config/dataset-definitions",
        headers={"Content-Type": "application/json"},
        json=cleanup_config,
    )
    if response.ok:
        print("✅ Test dataset cleaned up")
    else:
        print(
            f"⚠️ Warning: Failed to clean up test dataset: {response.status_code}"
        )


def test_config_reload():
    """Test that backend automatically reloads configuration when datasets are updated.

    Flow: read current dataset definitions, PUT them back with an extra
    (disabled) test dataset appended, then poll /api/status to verify the
    backend picked up the new dataset without a restart.

    Returns:
        bool: True if the backend reloaded the configuration, False otherwise.
    """
    print("🧪 Testing automatic configuration reload...")
    try:
        # Step 1: Get current dataset definitions
        print("📖 Reading current dataset definitions...")
        response = requests.get(f"{BASE_URL}/api/config/dataset-definitions")
        if not response.ok:
            print(f"❌ Failed to read dataset definitions: {response.status_code}")
            return False
        current_config = response.json()
        datasets = current_config.get("data", {}).get("datasets", [])
        print(f"Current datasets: {[d.get('id') for d in datasets]}")

        # Step 2: Add a test dataset (disabled so it never starts sampling)
        print(f" Adding test dataset: {TEST_DATASET_ID}")
        test_dataset = {
            "id": TEST_DATASET_ID,
            "name": "Test Reload Dataset",
            "prefix": "test_reload",
            "sampling_interval": 1.0,
            "enabled": False,
        }
        # Remove any stale copy from a previous aborted run, then append.
        new_datasets = [d for d in datasets if d.get("id") != TEST_DATASET_ID]
        new_datasets.append(test_dataset)
        new_config = {
            "datasets": new_datasets,
            "version": "1.0",
            "last_update": f"{time.time()}",
        }
        response = requests.put(
            f"{BASE_URL}/api/config/dataset-definitions",
            headers={"Content-Type": "application/json"},
            json=new_config,
        )
        if not response.ok:
            print(f"❌ Failed to save dataset definitions: {response.status_code}")
            return False
        print("✅ Dataset definitions saved")

        # From here on the test dataset exists server-side, so always clean it
        # up — previously it leaked when the reload check failed.
        try:
            # Step 3: Check if backend has reloaded the configuration
            print("🔍 Checking if backend reloaded configuration...")
            time.sleep(1)  # Give backend a moment to reload
            response = requests.get(f"{BASE_URL}/api/status")
            if not response.ok:
                print(f"❌ Failed to get status: {response.status_code}")
                return False
            status = response.json()
            backend_datasets = status.get("datasets", {})
            if TEST_DATASET_ID in backend_datasets:
                print(f"✅ Backend successfully loaded new dataset: {TEST_DATASET_ID}")
                print(f"Dataset details: {backend_datasets[TEST_DATASET_ID]}")
                return True
            print(
                f"❌ Backend did not reload configuration. Available datasets: {list(backend_datasets.keys())}"
            )
            return False
        finally:
            # Step 4: Clean up - remove test dataset (runs on every exit path)
            _cleanup_test_dataset(new_datasets)
    except requests.exceptions.ConnectionError:
        print(
            "❌ Could not connect to backend. Make sure the Flask server is running on http://localhost:5000"
        )
        return False
    except Exception as e:
        print(f"❌ Test failed with error: {e}")
        return False
if __name__ == "__main__":
    # Run the reload check once and report the outcome on stdout.
    outcome = test_config_reload()
    banner = (
        "🎉 Configuration reload test PASSED!"
        if outcome
        else "💥 Configuration reload test FAILED!"
    )
    print(f"\n{banner}")