From a655e68b7172d8e81c6457700539c70c597343c2 Mon Sep 17 00:00:00 2001 From: Miguel Date: Sun, 14 Sep 2025 12:10:06 +0200 Subject: [PATCH] Refactor code structure for improved readability and maintainability --- app/app.py | 59 +- app/config/config.py | 8 + app/services/data_manager.py | 11 +- app/templates/script_group.html | 29 +- app/templates/script_loading.html | 285 +++ .../group_3/user_1/Hola/config.json | 6 + .../user_1/Test Final Project/config.json | 6 + .../Test Data Processing Project/config.json | 6 + docker-compose.yml | 10 +- scripts/init_db.py | 21 +- scripts/run_app.py | 18 +- .../user_1/project_1/script_7/metadata.json | 4 +- .../user_1/project_1/script_7/script_app.py | 2 +- .../user_1/project_1/script_8/metadata.json | 7 + .../user_1/project_1/script_8/script_app.py | 1786 +++++++++++++++++ 15 files changed, 2212 insertions(+), 46 deletions(-) create mode 100644 app/templates/script_loading.html create mode 100644 data/script_groups/group_3/user_1/Hola/config.json create mode 100644 data/script_groups/group_3/user_1/Test Final Project/config.json create mode 100644 data/script_groups/group_5/user_1/Test Data Processing Project/config.json create mode 100644 workspaces/user_1/project_1/script_8/metadata.json create mode 100644 workspaces/user_1/project_1/script_8/script_app.py diff --git a/app/app.py b/app/app.py index 78a2faf..b10a9c2 100644 --- a/app/app.py +++ b/app/app.py @@ -221,6 +221,45 @@ def register_routes(app): user_language=user_language, ) + @app.route("/script//loading") + @login_required + def script_loading_page(script_id): + """ + Página de loading intermedia que se muestra mientras el script se inicia + """ + script = Script.query.get_or_404(script_id) + + # Check if user can access this script + if not can_access_script(current_user.user_level, script.required_level): + flash("Insufficient permissions to access this script", "error") + return redirect(url_for("dashboard")) + + # Get active project for this script's group + 
active_project_id = session.get("active_project_id") + project = None + + if active_project_id: + project = UserProject.query.filter_by( + id=active_project_id, + user_id=current_user.id, + group_id=script.group_id, + ).first() + + if not project: + flash("No active project found for this script group", "error") + return redirect(url_for("script_group_view", group_id=script.group_id)) + + # Generate proxy URL + proxy_url = f"/project/{project.id}/script/{script.id}/user/{current_user.id}/" + + return render_template( + "script_loading.html", + script=script, + script_group=script.script_group, + project=project, + proxy_url=proxy_url, + ) + # Administration routes @app.route("/admin") @login_required @@ -568,7 +607,10 @@ def register_routes(app): ) print(f"[API_EXEC] Script executor result: {result}") - return jsonify(result) + print(f"[API_EXEC] About to return JSON response...") + response = jsonify(result) + print(f"[API_EXEC] JSON response created successfully") + return response @app.route("/api/scripts//stop", methods=["POST"]) @login_required @@ -1105,7 +1147,20 @@ def create_app(config_name="default"): app = Flask(__name__) # Load configuration - app.config.from_object(config[config_name]) + config_obj = config[config_name] + app.config.from_object(config_obj) + + # Apply security configuration if present + if hasattr(config_obj, 'SECURITY_CONFIG'): + security_config = config_obj.SECURITY_CONFIG + + # Apply session cookie settings + if 'session_cookie_secure' in security_config: + app.config['SESSION_COOKIE_SECURE'] = security_config['session_cookie_secure'] + if 'session_cookie_httponly' in security_config: + app.config['SESSION_COOKIE_HTTPONLY'] = security_config['session_cookie_httponly'] + if 'session_cookie_samesite' in security_config: + app.config['SESSION_COOKIE_SAMESITE'] = security_config['session_cookie_samesite'] # Add custom Jinja2 filters @app.template_filter("fromjson") diff --git a/app/config/config.py b/app/config/config.py index 
753b5cf..47368d3 100644 --- a/app/config/config.py +++ b/app/config/config.py @@ -126,6 +126,14 @@ class DevelopmentConfig(Config): 'pool_pre_ping': True, 'echo': True, # Log SQL queries in development } + + # Development security config - allow HTTP cookies + SECURITY_CONFIG = { + **Config.SECURITY_CONFIG, + "session_cookie_secure": False, # Allow HTTP cookies in development + "session_cookie_httponly": True, + "session_cookie_samesite": "Lax", + } class ProductionConfig(Config): diff --git a/app/services/data_manager.py b/app/services/data_manager.py index 2a9fc2c..ff7b789 100644 --- a/app/services/data_manager.py +++ b/app/services/data_manager.py @@ -79,9 +79,12 @@ class DataManager: print(f"Error saving config file {config_file}: {e}") raise - def list_user_projects(self, user_id: int, group_id: int) -> List[Dict]: - """List all projects for user in specific group.""" - projects = UserProject.query.filter_by(user_id=user_id, group_id=group_id).all() + def list_user_projects(self, user_id: int, group_id: int = None) -> List[Dict]: + """List all projects for user in specific group or all groups if group_id is None.""" + if group_id is None: + projects = UserProject.query.filter_by(user_id=user_id).all() + else: + projects = UserProject.query.filter_by(user_id=user_id, group_id=group_id).all() result = [] for project in projects: @@ -89,7 +92,7 @@ class DataManager: # Add file system information project_path = self.get_user_project_path( - user_id, group_id, project.project_name + user_id, project.group_id, project.project_name ) if project_path.exists(): diff --git a/app/templates/script_group.html b/app/templates/script_group.html index 84f5c0b..d2baec8 100644 --- a/app/templates/script_group.html +++ b/app/templates/script_group.html @@ -799,32 +799,11 @@ class ScriptGroupManager { async runScript(scriptId, parameters = {}) { try { - const response = await fetch(`/api/scripts/${scriptId}/execute`, { - method: 'POST', - headers: { - 'Content-Type': 
'application/json', - }, - body: JSON.stringify({ - parameters: parameters - }) - }); - - const result = await response.json(); - - if (result.success) { - ScriptsManager.showNotification('Script started successfully', 'success'); - this.updateScriptStatus(scriptId, 'running'); - - // If the script has an interface URL, wait for it to be ready before opening - if (result.interface_url) { - this.waitForScriptAndOpen(result.interface_url, scriptId); - } - } else { - ScriptsManager.showNotification(result.message || 'Failed to start script', 'error'); - } + // Redirect immediately to loading page - it will handle script execution + window.location.href = `/script/${scriptId}/loading`; } catch (error) { - console.error('Error running script:', error); - ScriptsManager.showNotification('Error running script', 'error'); + console.error('Error redirecting to loading page:', error); + ScriptsManager.showNotification('Error starting script', 'error'); } } diff --git a/app/templates/script_loading.html b/app/templates/script_loading.html new file mode 100644 index 0000000..f85da80 --- /dev/null +++ b/app/templates/script_loading.html @@ -0,0 +1,285 @@ + + + + + + Loading Script - ScriptsManager + + + + + + + + + +
+
+ +
+ Loading... +
+ + +

Starting Script...

+

Initializing script environment...

+ + +
+
+
+ + +
+
{{ script.display_name or script.filename }}
+

{{ script.description or "Running script..." }}

+ + {{ script_group.name }} | + {{ current_user.username }} | + {{ project.project_name }} + +
+ + + +
+
+ + + + + + + \ No newline at end of file diff --git a/data/script_groups/group_3/user_1/Hola/config.json b/data/script_groups/group_3/user_1/Hola/config.json new file mode 100644 index 0000000..bd309ac --- /dev/null +++ b/data/script_groups/group_3/user_1/Hola/config.json @@ -0,0 +1,6 @@ +{ + "created_at": "2025-09-14T08:09:16.779232", + "last_modified": "2025-09-14T08:09:16.779241", + "project_settings": {}, + "user_preferences": {} +} \ No newline at end of file diff --git a/data/script_groups/group_3/user_1/Test Final Project/config.json b/data/script_groups/group_3/user_1/Test Final Project/config.json new file mode 100644 index 0000000..3122826 --- /dev/null +++ b/data/script_groups/group_3/user_1/Test Final Project/config.json @@ -0,0 +1,6 @@ +{ + "created_at": "2025-09-14T08:01:20.986814", + "last_modified": "2025-09-14T08:01:20.986822", + "project_settings": {}, + "user_preferences": {} +} \ No newline at end of file diff --git a/data/script_groups/group_5/user_1/Test Data Processing Project/config.json b/data/script_groups/group_5/user_1/Test Data Processing Project/config.json new file mode 100644 index 0000000..c578155 --- /dev/null +++ b/data/script_groups/group_5/user_1/Test Data Processing Project/config.json @@ -0,0 +1,6 @@ +{ + "created_at": "2025-09-14T07:57:47.252787", + "last_modified": "2025-09-14T07:57:47.252795", + "project_settings": {}, + "user_preferences": {} +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 0ade5e7..0b54a48 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -87,7 +87,6 @@ services: - scriptsmanager_network ports: - "5003:5003" - - "5200-5400:5200-5400" - "5678:5678" # Debug port environment: # Database Configuration (same as production for parity) @@ -115,20 +114,21 @@ services: - SIDEL_LOGO_PATH=/app/app/static/images/SIDEL.png - CORPORATE_BRANDING=true volumes: - # Hot reload: mount entire codebase + # Hot reload: mount entire codebase excluding workspaces - .:/app - 
./data:/app/data - ./backup:/app/backup - ./logs:/app/logs - # Workspaces for proxy scripts - - ./workspaces:/app/workspaces + # Workspaces for proxy scripts (mounted separately to avoid hot-reload) + - ./workspaces:/app/workspaces:delegated depends_on: postgres: condition: service_healthy + restart: unless-stopped command: > bash -c "source activate scriptsmanager && echo '=== SIDEL ScriptsManager Development Environment ===' && - echo 'Hot reload enabled - code changes will be reflected automatically' && + echo 'Development mode with stable container (hot reload disabled for stability)' && echo 'Application will be available at: http://localhost:5003' && echo 'Debug port available at: 5678' && python scripts/init_db.py && diff --git a/scripts/init_db.py b/scripts/init_db.py index 1838e02..fed19e1 100644 --- a/scripts/init_db.py +++ b/scripts/init_db.py @@ -30,11 +30,22 @@ def init_database(): with app.app_context(): try: - # Drop all tables (for fresh start) - print("Dropping existing tables...") - db.drop_all() - - # Create all tables + # Check if database is already initialized + from sqlalchemy import inspect + inspector = inspect(db.engine) + existing_tables = inspector.get_table_names() + + if existing_tables: + print(f"Database already initialized with {len(existing_tables)} tables.") + print("Skipping table creation and data initialization.") + + # Still discover script groups in case new ones were added + print("Updating script group discovery...") + discover_script_groups() + print("Database update completed successfully!") + return + + # Only create tables if database is empty print("Creating database tables...") db.create_all() diff --git a/scripts/run_app.py b/scripts/run_app.py index 16d46fc..e5f9ee5 100755 --- a/scripts/run_app.py +++ b/scripts/run_app.py @@ -25,15 +25,29 @@ def run_app(): port = int(os.environ.get('PORT', 5003 if os.environ.get('DEBUG') == 'true' else 5002)) debug = os.environ.get('DEBUG', 'false').lower() == 'true' + # For 
development: disable auto-reloader to prevent container restarts when workspaces change + # This avoids the issue where script execution causes Flask to restart + use_reloader = False if debug else False + # Run with SocketIO support - socketio.run(app, host="0.0.0.0", port=port, debug=debug) + socketio.run( + app, + host="0.0.0.0", + port=port, + debug=debug, + use_reloader=use_reloader + ) if __name__ == "__main__": port = int(os.environ.get('PORT', 5003 if os.environ.get('DEBUG') == 'true' else 5002)) print("Starting ScriptsManager...") print(f"🚀 Application will be available at: http://localhost:{port}") - print("🔄 HOT-RELOAD ENABLED - Modify files and they will update automatically!") + if os.environ.get('DEBUG') == 'true': + print("� DEVELOPMENT MODE - Auto-reloader disabled for container stability") + print(" Manual restart required for code changes") + else: + print("🏭 PRODUCTION MODE") print("Press Ctrl+C to stop the server") print("-" * 50) run_app() diff --git a/workspaces/user_1/project_1/script_7/metadata.json b/workspaces/user_1/project_1/script_7/metadata.json index 9869365..7b61397 100644 --- a/workspaces/user_1/project_1/script_7/metadata.json +++ b/workspaces/user_1/project_1/script_7/metadata.json @@ -6,10 +6,10 @@ "CONDA_DEFAULT_ENV": "tsnet", "USER_LEVEL": "admin", "PROJECT_ID": "1", - "PROJECT_NAME": "test", + "PROJECT_NAME": "Hola", "USER_THEME": "light", "USER_LANGUAGE": "en", "SCRIPT_GROUP_NAME": "Hydraulic Analysis Tools" }, - "created_at": "2025-09-14T07:35:04.965536" + "created_at": "2025-09-14T10:08:46.733111" } \ No newline at end of file diff --git a/workspaces/user_1/project_1/script_7/script_app.py b/workspaces/user_1/project_1/script_7/script_app.py index eef9ab3..6f48087 100644 --- a/workspaces/user_1/project_1/script_7/script_app.py +++ b/workspaces/user_1/project_1/script_7/script_app.py @@ -10,7 +10,7 @@ os.chdir(WORKSPACE_PATH) # Variables disponibles para el script del usuario PROJECT_WORKSPACE = WORKSPACE_PATH PARAMETERS = 
{} -ENVIRONMENT = {'CONDA_DEFAULT_ENV': 'tsnet', 'USER_LEVEL': 'admin', 'PROJECT_ID': '1', 'PROJECT_NAME': 'test', 'USER_THEME': 'light', 'USER_LANGUAGE': 'en', 'SCRIPT_GROUP_NAME': 'Hydraulic Analysis Tools'} +ENVIRONMENT = {'CONDA_DEFAULT_ENV': 'tsnet', 'USER_LEVEL': 'admin', 'PROJECT_ID': '1', 'PROJECT_NAME': 'Hola', 'USER_THEME': 'light', 'USER_LANGUAGE': 'en', 'SCRIPT_GROUP_NAME': 'Hydraulic Analysis Tools'} # Importar Flask y configurar aplicación from flask import Flask, request, jsonify, render_template, send_file, redirect, url_for diff --git a/workspaces/user_1/project_1/script_8/metadata.json b/workspaces/user_1/project_1/script_8/metadata.json new file mode 100644 index 0000000..f368e3a --- /dev/null +++ b/workspaces/user_1/project_1/script_8/metadata.json @@ -0,0 +1,7 @@ +{ + "port": 5201, + "workspace": "/app/workspaces/user_1/project_1/script_8", + "parameters": null, + "environment": null, + "created_at": "2025-09-14T07:38:22.462360" +} \ No newline at end of file diff --git a/workspaces/user_1/project_1/script_8/script_app.py b/workspaces/user_1/project_1/script_8/script_app.py new file mode 100644 index 0000000..257aaba --- /dev/null +++ b/workspaces/user_1/project_1/script_8/script_app.py @@ -0,0 +1,1786 @@ + +import sys +import os +sys.path.insert(0, "/app") + +# Configuración del workspace +WORKSPACE_PATH = "/app/workspaces/user_1/project_1/script_8" +os.chdir(WORKSPACE_PATH) + +# Variables disponibles para el script del usuario +PROJECT_WORKSPACE = WORKSPACE_PATH +PARAMETERS = {} +ENVIRONMENT = {} + +# Importar Flask y configurar aplicación +from flask import Flask, request, jsonify, render_template, send_file, redirect, url_for +import threading +import signal +import atexit + +app = Flask(__name__) +app.config['SECRET_KEY'] = 'script-session-key' + +# Health check endpoint requerido +@app.route('/health') +def health_check(): + return {"status": "ok", "workspace": WORKSPACE_PATH}, 200 + +# Función para limpiar al cerrar +def cleanup(): + 
print("Script shutting down...") + +atexit.register(cleanup) +signal.signal(signal.SIGTERM, lambda s, f: sys.exit(0)) + +# === CÓDIGO DEL USUARIO === +""" +ScriptsManager Metadata: +@description: Interactive Water Hammer Simulator for Syrup Pumping Systems +@description_long: docs/hammer_simulator.md +@description_es: Simulador Interactivo de Golpe de Ariete para Sistemas de Bombeo de Jarabe +@description_long_es: docs/hammer_simulator_es.md +@description_it: Simulatore Interattivo del Colpo d'Ariete per Sistemi di Pompaggio Sciroppo +@description_long_it: docs/hammer_simulator_it.md +@description_fr: Simulateur Interactif de Coup de Bélier pour Systèmes de Pompage de Sirop +@description_long_fr: docs/hammer_simulator_fr.md +@required_level: operator +@category: simulation +@tags: hydraulics,water_hammer,pumping,syrup,transient_analysis +@parameters: [] +@execution_timeout: 3600 +@flask_port: auto +""" + +import argparse +import os +import json +import math +import numpy as np +from flask import Flask, render_template, request, jsonify, session, send_file +from flask_socketio import SocketIO, emit +from datetime import datetime +import logging +import webbrowser +import threading +import time +import sys +import sqlite3 +import uuid +import base64 +import io +import zipfile +from pathlib import Path + +# Import for PDF and image generation +try: + import matplotlib + + matplotlib.use("Agg") # Use non-interactive backend + import matplotlib.pyplot as plt + from matplotlib.backends.backend_pdf import PdfPages + + MATPLOTLIB_AVAILABLE = True +except ImportError: + MATPLOTLIB_AVAILABLE = False + +# Import for PlantUML integration +try: + import requests + import urllib.parse + + REQUESTS_AVAILABLE = True +except ImportError: + REQUESTS_AVAILABLE = False + +try: + from reportlab.lib.pagesizes import letter, A4 + from reportlab.platypus import ( + SimpleDocTemplate, + Paragraph, + Spacer, + Image as RLImage, + Table, + TableStyle, + ) + from reportlab.lib.styles import 
getSampleStyleSheet, ParagraphStyle + from reportlab.lib.units import inch + from reportlab.lib import colors + + REPORTLAB_AVAILABLE = True +except ImportError: + REPORTLAB_AVAILABLE = False + + +def parse_arguments(): + """Parse command line arguments from ScriptsManager""" + parser = argparse.ArgumentParser( + description="Water Hammer Simulator for ScriptsManager" + ) + parser.add_argument( + "--data-dir", required=True, help="Data directory for persistent storage" + ) + parser.add_argument("--user-level", required=True, help="User permission level") + parser.add_argument("--port", type=int, required=True, help="Flask port number") + parser.add_argument("--project-id", required=True, help="Project identifier") + parser.add_argument("--project-name", required=True, help="Project display name") + parser.add_argument( + "--theme", required=False, default="light", help="User interface theme (light/dark)" + ) + parser.add_argument( + "--language", required=False, default="en", help="User interface language (en/es/it/fr)" + ) + parser.add_argument( + "--user-id", required=False, default="unknown", help="User identifier" + ) + parser.add_argument( + "--session-id", required=False, help="Session identifier from ScriptsManager" + ) + return parser.parse_args() + + +class ExecutionLogger: + """Comprehensive logging system according to ScriptsManager specifications""" + + def __init__(self, data_dir, user_id, project_id, script_name): + self.data_dir = data_dir + self.user_id = user_id + self.project_id = project_id + self.script_name = script_name + self.execution_id = str(uuid.uuid4()) + self.start_time = datetime.now() + + # Create logs directory structure + self.logs_dir = os.path.join( + data_dir, "..", "..", "..", "logs", "executions", user_id + ) + today = datetime.now().strftime("%Y-%m-%d") + self.daily_log_dir = os.path.join(self.logs_dir, today) + os.makedirs(self.daily_log_dir, exist_ok=True) + + # Log file path + self.log_file = os.path.join( + 
self.daily_log_dir, f"execution_{self.execution_id}.log" + ) + + # Setup logging + self.logger = logging.getLogger(f"execution_{self.execution_id}") + self.logger.setLevel(logging.INFO) + + # File handler + file_handler = logging.FileHandler(self.log_file, encoding="utf-8") + file_handler.setLevel(logging.INFO) + + # Formatter + formatter = logging.Formatter( + "%(asctime)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + file_handler.setFormatter(formatter) + self.logger.addHandler(file_handler) + + # Log execution start + self.log_info(f"Execution started for script: {script_name}") + self.log_info(f"User: {user_id}, Project: {project_id}") + self.log_info(f"Execution ID: {self.execution_id}") + + def log_info(self, message): + """Log info message""" + self.logger.info(message) + + def log_error(self, message): + """Log error message""" + self.logger.error(message) + + def log_warning(self, message): + """Log warning message""" + self.logger.warning(message) + + def log_calculation(self, params, results): + """Log calculation details""" + self.log_info("=== CALCULATION PERFORMED ===") + self.log_info(f"Input parameters: {json.dumps(params, indent=2)}") + self.log_info(f"Results: {json.dumps(results.get('parameters', {}), indent=2)}") + + def log_user_action(self, action, details=None): + """Log user actions""" + message = f"User action: {action}" + if details: + message += f" - {details}" + self.log_info(message) + + def log_session_event(self, event, details=None): + """Log session events""" + message = f"Session event: {event}" + if details: + message += f" - {details}" + self.log_info(message) + + def finalize_execution(self, exit_code=0): + """Finalize execution logging""" + duration = (datetime.now() - self.start_time).total_seconds() + self.log_info(f"Execution completed. 
Duration: {duration:.2f} seconds") + self.log_info(f"Exit code: {exit_code}") + + # Remove handlers to close file + for handler in self.logger.handlers[:]: + handler.close() + self.logger.removeHandler(handler) + + +class SessionManager: + """Enhanced session management according to specifications""" + + def __init__(self, user_id, project_id, execution_logger): + self.user_id = user_id + self.project_id = project_id + self.session_id = str(uuid.uuid4()) + self.start_time = datetime.now() + self.last_activity = self.start_time + self.logger = execution_logger + + self.logger.log_session_event( + "session_created", + { + "session_id": self.session_id, + "user_id": user_id, + "project_id": project_id, + }, + ) + + def update_activity(self): + """Update last activity timestamp""" + self.last_activity = datetime.now() + + def is_expired(self, timeout_minutes=30): + """Check if session is expired""" + return (datetime.now() - self.last_activity).seconds > (timeout_minutes * 60) + + def cleanup(self): + """Cleanup session""" + duration = (datetime.now() - self.start_time).total_seconds() + self.logger.log_session_event( + "session_ended", + {"duration_seconds": duration, "session_id": self.session_id}, + ) + + +class PlantUMLGenerator: + """Generator for PlantUML system diagrams""" + + def __init__(self, server_url="http://192.168.88.26:8881"): + self.server_url = server_url + + def generate_system_diagram(self, params, results=None): + """Generate PlantUML code for the water hammer system diagram""" + + # Extract key parameters + pipe_length = params.get("pipe_length", 300) + # Convert to mm + pipe_diameter = params.get("pipe_diameter", 0.065) * 1000 + pump_pressure = params.get("pump_pressure", 7) + flow_rate = params.get("flow_rate", 22000) + closure_time = params.get("closure_time", 2.0) + damper_enabled = params.get("damper_enabled", False) + damper_position = params.get("damper_position", 280) + damper_volume = params.get("damper_volume", 50) + + # Get results if 
available + real_surge = "" + wave_speed = "" + critical_time = "" + if results: + real_surge = f"{results.get('real_surge', 0):.1f} bar" + wave_speed = f"{results.get('wave_speed', 0):.0f} m/s" + critical_time = f"{results.get('critical_time', 0):.2f} s" + + # Start PlantUML diagram + diagram = "@startuml\n" + diagram += "!theme plain\n" + diagram += "skinparam backgroundColor white\n" + diagram += "skinparam componentStyle rectangle\n\n" + + # Title + diagram += "title Water Hammer System Diagram\\n" + if real_surge: + diagram += f"Pressure Surge: {real_surge} | " + if wave_speed: + diagram += f"Wave Speed: {wave_speed} | " + if critical_time: + diagram += f"Critical Time: {critical_time}" + diagram += "\n\n" + + # Define components + diagram += 'rectangle "PUMP" as pump #lightblue {\n' + diagram += f" Pressure: {pump_pressure} bar\\n" + diagram += f" Flow: {flow_rate/1000:.1f} m³/h\n" + diagram += "}\n\n" + + # Pipeline sections + if damper_enabled: + # Pipeline with damper + pre_damper_length = damper_position + post_damper_length = pipe_length - damper_position + + diagram += 'rectangle "PIPELINE" as pipe1 #lightgray {\n' + diagram += f" Length: {pre_damper_length:.0f} m\\n" + diagram += f" Diameter: {pipe_diameter:.0f} mm\n" + diagram += "}\n\n" + + diagram += 'rectangle "DAMPER" as damper #orange {\n' + diagram += f" Volume: {damper_volume} L\\n" + diagram += f" Position: {damper_position:.0f} m\n" + diagram += "}\n\n" + + diagram += 'rectangle "PIPELINE" as pipe2 #lightgray {\n' + diagram += f" Length: {post_damper_length:.0f} m\\n" + diagram += f" Diameter: {pipe_diameter:.0f} mm\n" + diagram += "}\n\n" + else: + # Single pipeline + diagram += 'rectangle "PIPELINE" as pipeline #lightgray {\n' + diagram += f" Total Length: {pipe_length:.0f} m\\n" + diagram += f" Diameter: {pipe_diameter:.0f} mm\n" + diagram += "}\n\n" + + # Valve + diagram += 'rectangle "VALVE" as valve #red {\n' + diagram += f" Closure Time: {closure_time} s\n" + diagram += "}\n\n" + + # 
Tank/Reservoir + diagram += 'rectangle "TANK" as tank #lightgreen {\n' + diagram += " Destination\n" + diagram += "}\n\n" + + # Connections + if damper_enabled: + diagram += "pump --> pipe1 : Flow\n" + diagram += "pipe1 --> damper\n" + diagram += "damper --> pipe2\n" + diagram += "pipe2 --> valve\n" + else: + diagram += "pump --> pipeline : Flow\n" + diagram += "pipeline --> valve\n" + + diagram += "valve --> tank\n\n" + + # Add notes with key parameters + diagram += "note top of pump\n" + diagram += "System Parameters:\\n" + diagram += f"• Total Length: {pipe_length} m\\n" + diagram += f"• Flow Rate: {flow_rate} L/h\\n" + diagram += f"• Pump Pressure: {pump_pressure} bar\n" + if damper_enabled: + diagram += f"• Damper: YES ({damper_position}m)\n" + else: + diagram += "• Damper: NO\n" + diagram += "end note\n\n" + + if real_surge: + diagram += "note bottom of valve\n" + diagram += f"Results:\\n" + diagram += f"• Pressure Surge: {real_surge}\\n" + if wave_speed: + diagram += f"• Wave Speed: {wave_speed}\\n" + if critical_time: + diagram += f"• Critical Time: {critical_time}\n" + diagram += "end note\n\n" + + diagram += "@enduml" + + return diagram + + def render_diagram(self, plantuml_code): + """Send PlantUML code to server and get rendered image""" + if not REQUESTS_AVAILABLE: + raise ImportError("requests library not available for PlantUML rendering") + + try: + # Encode PlantUML code for URL + encoded_code = self._encode_plantuml(plantuml_code) + + # Construct URL for PNG format + url = f"{self.server_url}/png/{encoded_code}" + + # Request the image + response = requests.get(url, timeout=30) + response.raise_for_status() + + return response.content + + except Exception as e: + raise Exception(f"Error rendering PlantUML diagram: {str(e)}") + + def _encode_plantuml(self, plantuml_code): + """Encode PlantUML code for URL transmission""" + import zlib + import base64 + + # Compress the code + compressed = zlib.compress(plantuml_code.encode("utf-8")) + + # Base64 
encode with URL-safe alphabet + encoded = base64.b64encode(compressed).decode("ascii") + + # Replace characters for URL safety + encoded = encoded.replace("+", "-").replace("/", "_") + + return encoded + + +class WaterHammerCalculator: + """Core calculation engine for water hammer analysis""" + + def __init__(self): + self.default_params = { + "pipe_length": 300.0, # m + "pipe_diameter": 0.065, # m (65mm internal diameter) + "wall_thickness": 0.003, # m (3mm wall thickness) + "roughness": 1.5e-6, # m (stainless steel roughness) + "flow_rate": 22000.0, # L/h + "pump_pressure": 7.0, # bar + "fluid_density": 1100.0, # kg/m³ (syrup density) + "fluid_temperature": 20.0, # °C + "bulk_modulus": 2.2e9, # Pa + "young_modulus": 200e9, # Pa (steel) + "closure_time": 2.0, # s + "damper_enabled": False, + "damper_volume": 50.0, # L + "damper_precharge": 4.0, # bar + "damper_gas_percentage": 60.0, # % + "damper_position": 280.0, # m + "damper_connection_diameter": 0.05, # m (50mm diameter) + "damper_connection_length": 0.5, # m + "simulation_time": 10.0, # s + } + + def calculate_system_parameters(self, params): + """Calculate derived system parameters""" + + # Get values (dimensions already in meters) + L = params["pipe_length"] # m + D = params["pipe_diameter"] # m (already in meters) + e = params["wall_thickness"] # m (already in meters) + Q = params["flow_rate"] / 3600 / 1000 # Convert L/h to m³/s + P_pump = params["pump_pressure"] # bar + rho = params["fluid_density"] # kg/m³ + K = params["bulk_modulus"] # Pa + E = params["young_modulus"] # Pa + t_close = params["closure_time"] # s + + # Calculate fluid velocity + A = math.pi * (D / 2) ** 2 + V = Q / A + + # Calculate wave speed (Joukowsky formula) + a = math.sqrt(K / rho) / math.sqrt(1 + (K * D) / (E * e)) + + # Critical time + t_critical = 2 * L / a + + # Joukowsky pressure surge + delta_p_joukowsky = rho * a * V / 100000 # Convert to bar + + # Time reduction factor + if t_close <= t_critical: + time_factor = 1.0 + else: 
+ time_factor = t_critical / t_close + + # Damper effect + damper_factor = 1.0 + damper_efficiency = 0.0 + damper_effective_volume = 0.0 + + if params["damper_enabled"]: + # Detailed hydropneumatic damper calculation + damper_vol_total = params["damper_volume"] / 1000 # Convert L to m³ + damper_p0 = params["damper_precharge"] * 100000 # Convert bar to Pa + damper_gas_vol = damper_vol_total * params["damper_gas_percentage"] / 100 + damper_pos = params["damper_position"] + + # Operating pressure + P_operation = params["pump_pressure"] * 100000 # Pa + + # Effective gas volume considering isothermal compression + # P0*V0 = P1*V1 (Boyle's Law) + damper_vol_effective = (damper_gas_vol * damper_p0) / P_operation + + # Equivalent compressibility of damper + fluid system + # C_eq = V_gas_effective / (V_pipe * K_fluid) + V_pipe = A * L + damper_compressibility = damper_vol_effective / (V_pipe * K / P_operation) + + # Reduction factor based on effective compressibility + # Simplified equation for hydropneumatic damper + C_total = 1 / K + damper_compressibility + C_original = 1 / K + compressibility_factor = C_original / C_total + + # Efficiency based on damper position + normalized_distance = damper_pos / L + position_efficiency = 1.0 - 0.3 * abs(normalized_distance - 0.9) + + # Final damper factor + damper_factor = 1.0 - (1.0 - compressibility_factor) * position_efficiency + + # Store damper results + damper_efficiency = (1.0 - damper_factor) * 100 # Percentage + damper_effective_volume = damper_vol_effective * 1000 # L + + # Total reduction factor + total_factor = time_factor * damper_factor + + # Real pressure surge + delta_p_real = delta_p_joukowsky * total_factor + + return { + "fluid_velocity": V, + "wave_speed": a, + "critical_time": t_critical, + "joukowsky_surge": delta_p_joukowsky, + "time_factor": time_factor, + "damper_factor": damper_factor, + "total_factor": total_factor, + "real_surge": delta_p_real, + "damper_efficiency": damper_efficiency, + 
"damper_effective_volume": damper_effective_volume, + } + + def simulate_transient(self, params, results): + """Generate transient simulation data""" + + t_sim = params["simulation_time"] + dt = 0.01 # Time step + times = np.arange(0, t_sim, dt) + + # System parameters + P0 = params["pump_pressure"] + Q0 = params["flow_rate"] / 3600 / 1000 # m³/s + t_close = params["closure_time"] + t_critical = results["critical_time"] + delta_p_real = results["real_surge"] + + # Generate pressure and flow profiles + pressures_start = [] + pressures_mid = [] + pressures_end = [] + flows = [] + + for t in times: + # Valve closure profile + if t <= t_close: + closure_factor = 1 - (t / t_close) + else: + closure_factor = 0 + + # Flow rate + Q_t = Q0 * closure_factor + flows.append(Q_t * 3600 * 1000) # Convert back to L/h + + # Pressure oscillations + # Simplified model - in reality this would be much more complex + + if t <= t_close: + # During closure + P_surge = P0 + delta_p_real * (t / t_close) + else: + # After closure - oscillations + phase = (t - t_close) * 2 * math.pi / t_critical + decay = math.exp(-(t - t_close) / (5 * t_critical)) # Damping + P_surge = P0 + delta_p_real * decay * math.cos(phase) + + # Pressure at different points + pressures_start.append(max(0, P_surge)) + # Slightly lower at midpoint + pressures_mid.append(max(0, P_surge * 0.8)) + pressures_end.append(max(0, P_surge * 0.6)) # Lower at end + + return { + "times": times.tolist(), + "pressures_start": pressures_start, + "pressures_mid": pressures_mid, + "pressures_end": pressures_end, + "flows": flows, + } + + +class DataManager: + """Manage persistent data storage""" + + def __init__(self, data_dir, project_id, project_name): + # Ensure data_dir is an absolute path + self.data_dir = os.path.abspath(data_dir) + self.project_id = project_id + self.project_name = project_name + self.config_file = os.path.join(self.data_dir, "hammer_config.json") + self.results_file = os.path.join(self.data_dir, 
"hammer_results.json") + + # Ensure data directory exists + os.makedirs(self.data_dir, exist_ok=True) + + def load_config(self): + """Load configuration from file""" + if os.path.exists(self.config_file): + try: + with open(self.config_file, "r") as f: + return json.load(f) + except Exception as e: + logging.warning(f"Error loading config: {e}") + return {} + + def save_config(self, config): + """Save configuration to file""" + try: + with open(self.config_file, "w") as f: + json.dump(config, f, indent=2) + except Exception as e: + logging.error(f"Error saving config: {e}") + + def save_results(self, results): + """Save calculation results""" + try: + with open(self.results_file, "w") as f: + json.dump(results, f, indent=2) + except Exception as e: + logging.error(f"Error saving results: {e}") + + +class ExportManager: + """Manage export functionality for reports and visualizations""" + + def __init__(self, data_dir, project_id, project_name, execution_logger): + # Ensure data_dir is an absolute path + self.data_dir = os.path.abspath(data_dir) + self.project_id = project_id + self.project_name = project_name + self.logger = execution_logger + self.exports_dir = os.path.join(self.data_dir, "exports") + os.makedirs(self.exports_dir, exist_ok=True) + + def generate_plots(self, params, results): + """Generate matplotlib plots for export""" + if not MATPLOTLIB_AVAILABLE: + raise ImportError("matplotlib is required for plot generation") + + plots = {} + + # Configure matplotlib for better PDF output + plt.style.use("default") + plt.rcParams["figure.dpi"] = 150 + plt.rcParams["savefig.dpi"] = 300 + plt.rcParams["font.size"] = 10 + + # Plot 1: Pressure vs Time + fig1, ax1 = plt.subplots(figsize=(10, 6)) + simulation = results["simulation"] + times = simulation["times"] + + ax1.plot( + times, simulation["pressures_start"], "b-", linewidth=2, label="Pipe Start" + ) + ax1.plot( + times, simulation["pressures_mid"], "g-", linewidth=2, label="Pipe Middle" + ) + ax1.plot( + 
times, simulation["pressures_end"], "r-", linewidth=2, label="Pipe End" + ) + + ax1.set_xlabel("Time (s)") + ax1.set_ylabel("Pressure (bar)") + ax1.set_title("Water Hammer Pressure Transient Analysis") + ax1.grid(True, alpha=0.3) + ax1.legend() + + # Add horizontal line for initial pressure + ax1.axhline( + y=params["pump_pressure"], + color="gray", + linestyle="--", + alpha=0.7, + label="Initial Pressure", + ) + + plots["pressure_plot"] = fig1 + + # Plot 2: Flow Rate vs Time + fig2, ax2 = plt.subplots(figsize=(10, 6)) + ax2.plot(times, simulation["flows"], "purple", linewidth=2) + ax2.set_xlabel("Time (s)") + ax2.set_ylabel("Flow Rate (L/h)") + ax2.set_title("Flow Rate During Valve Closure") + ax2.grid(True, alpha=0.3) + + plots["flow_plot"] = fig2 + + # Plot 3: System Parameters Summary + fig3, ((ax3a, ax3b), (ax3c, ax3d)) = plt.subplots(2, 2, figsize=(12, 8)) + + # Wave speed comparison + parameters = results["parameters"] + ax3a.bar( + ["Fluid Velocity", "Wave Speed"], + [parameters["fluid_velocity"], parameters["wave_speed"]], + color=["blue", "red"], + alpha=0.7, + ) + ax3a.set_ylabel("Velocity (m/s)") + ax3a.set_title("Fluid vs Wave Velocity") + + # Pressure components + pressures = ["Initial", "Joukowsky", "Real Surge"] + pressure_values = [ + params["pump_pressure"], + parameters["joukowsky_surge"], + parameters["real_surge"], + ] + ax3b.bar( + pressures, pressure_values, color=["green", "orange", "red"], alpha=0.7 + ) + ax3b.set_ylabel("Pressure (bar)") + ax3b.set_title("Pressure Analysis") + + # Time factors + factors = ["Time Factor", "Damper Factor", "Total Factor"] + factor_values = [ + parameters["time_factor"], + parameters["damper_factor"], + parameters["total_factor"], + ] + ax3c.bar(factors, factor_values, color=["blue", "purple", "black"], alpha=0.7) + ax3c.set_ylabel("Reduction Factor") + ax3c.set_title("Mitigation Factors") + ax3c.set_ylim(0, 1.1) + + # Damper effectiveness (if enabled) + if params.get("damper_enabled", False): + damper_data = 
["Efficiency (%)", "Effective Vol (L)"]
+ damper_values = [
+ parameters["damper_efficiency"],
+ parameters["damper_effective_volume"],
+ ]
+ ax3d.bar(damper_data, damper_values, color=["cyan", "magenta"], alpha=0.7)
+ ax3d.set_title("Damper Performance")
+ else:
+ ax3d.text(
+ 0.5,
+ 0.5,
+ "No Damper\nConfigured",
+ horizontalalignment="center",
+ verticalalignment="center",
+ transform=ax3d.transAxes,
+ fontsize=12,
+ )
+ ax3d.set_title("Damper Performance")
+
+ plt.tight_layout()
+ plots["summary_plot"] = fig3
+
+ return plots
+
+ def save_plot_as_image(self, plot, filename):
+ """Save a matplotlib plot as PNG image"""
+ filepath = os.path.join(self.exports_dir, f"{filename}.png")
+ plot.savefig(
+ filepath, dpi=300, bbox_inches="tight", facecolor="white", edgecolor="none"
+ )
+ plt.close(plot) # Free memory
+ return filepath
+
+ def export_to_pdf(self, params, results):
+ """Export complete analysis to PDF"""
+ if not REPORTLAB_AVAILABLE:
+ raise ImportError("reportlab is required for PDF generation")
+
+ if not MATPLOTLIB_AVAILABLE:
+ raise ImportError("matplotlib is required for plot generation")
+
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ pdf_filename = f"hammer_analysis_{timestamp}.pdf"
+ pdf_path = os.path.join(self.exports_dir, pdf_filename)
+
+ # Ensure the exports directory exists
+ os.makedirs(self.exports_dir, exist_ok=True)
+
+ # Generate plots
+ plots = self.generate_plots(params, results)
+
+ # Ensure the parent directory of the PDF file exists
+ pdf_dir = os.path.dirname(pdf_path)
+ os.makedirs(pdf_dir, exist_ok=True)
+
+ # Create PDF document
+ doc = SimpleDocTemplate(pdf_path, pagesize=A4)
+ styles = getSampleStyleSheet()
+ story = []
+
+ # Title
+ title_style = ParagraphStyle(
+ "CustomTitle",
+ parent=styles["Heading1"],
+ fontSize=18,
+ spaceAfter=30,
+ alignment=1, # Center alignment
+ )
+ story.append(Paragraph("Water Hammer Analysis Report", title_style))
+ story.append(Spacer(1, 20))
+
+ # Project information
+ 
story.append(Paragraph("Project Information", styles["Heading2"])) + project_info = f""" + Project ID: {self.project_id}
+ Analysis Date: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
+ Execution ID: {self.logger.execution_id}
+ """ + story.append(Paragraph(project_info, styles["Normal"])) + story.append(Spacer(1, 20)) + + # System Parameters + story.append(Paragraph("System Parameters", styles["Heading2"])) + params_table_data = [ + ["Parameter", "Value", "Unit"], + ["Pipe Length", f"{params['pipe_length']:.1f}", "m"], + ["Pipe Diameter", f"{params['pipe_diameter']*1000:.1f}", "mm"], + ["Wall Thickness", f"{params['wall_thickness']*1000:.1f}", "mm"], + ["Flow Rate", f"{params['flow_rate']:.0f}", "L/h"], + ["Pump Pressure", f"{params['pump_pressure']:.1f}", "bar"], + ["Fluid Density", f"{params['fluid_density']:.0f}", "kg/m³"], + ["Closure Time", f"{params['closure_time']:.1f}", "s"], + [ + "Damper Enabled", + "Yes" if params.get("damper_enabled", False) else "No", + "", + ], + ] + + params_table = Table(params_table_data) + params_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, 0), colors.grey), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.whitesmoke), + ("ALIGN", (0, 0), (-1, -1), "CENTER"), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("FONTSIZE", (0, 0), (-1, 0), 12), + ("BOTTOMPADDING", (0, 0), (-1, 0), 12), + ("BACKGROUND", (0, 1), (-1, -1), colors.beige), + ("GRID", (0, 0), (-1, -1), 1, colors.black), + ] + ) + ) + story.append(params_table) + story.append(Spacer(1, 20)) + + # Results Summary + story.append(Paragraph("Analysis Results", styles["Heading2"])) + parameters = results["parameters"] + results_table_data = [ + ["Result", "Value", "Unit"], + ["Fluid Velocity", f"{parameters['fluid_velocity']:.2f}", "m/s"], + ["Wave Speed", f"{parameters['wave_speed']:.1f}", "m/s"], + ["Critical Time", f"{parameters['critical_time']:.2f}", "s"], + ["Joukowsky Surge", f"{parameters['joukowsky_surge']:.1f}", "bar"], + ["Real Surge", f"{parameters['real_surge']:.1f}", "bar"], + ["Total Reduction Factor", f"{parameters['total_factor']:.3f}", ""], + ] + + if params.get("damper_enabled", False): + results_table_data.extend( + [ + [ + "Damper Efficiency", + 
f"{parameters['damper_efficiency']:.1f}", + "%", + ], + [ + "Effective Volume", + f"{parameters['damper_effective_volume']:.1f}", + "L", + ], + ] + ) + + results_table = Table(results_table_data) + results_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, 0), colors.grey), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.whitesmoke), + ("ALIGN", (0, 0), (-1, -1), "CENTER"), + ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"), + ("FONTSIZE", (0, 0), (-1, 0), 12), + ("BOTTOMPADDING", (0, 0), (-1, 0), 12), + ("BACKGROUND", (0, 1), (-1, -1), colors.beige), + ("GRID", (0, 0), (-1, -1), 1, colors.black), + ] + ) + ) + story.append(results_table) + story.append(Spacer(1, 30)) + + # Add plots to PDF + story.append(Paragraph("Analysis Plots", styles["Heading2"])) + + for plot_name, plot in plots.items(): + # Save plot as temporary image + temp_image_path = self.save_plot_as_image(plot, f"temp_{plot_name}") + + # Add image to PDF + img = RLImage(temp_image_path, width=7 * inch, height=4 * inch) + story.append(img) + story.append(Spacer(1, 20)) + + # Build PDF + doc.build(story) + + # Clean up temporary images + for file in os.listdir(self.exports_dir): + if file.startswith("temp_") and file.endswith(".png"): + os.remove(os.path.join(self.exports_dir, file)) + + self.logger.log_user_action("exported_pdf", pdf_filename) + return pdf_path + + def export_to_obsidian_zip(self, params, results): + """Export as Obsidian-style Markdown with attached images in ZIP""" + try: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + zip_filename = f"hammer_analysis_obsidian_{timestamp}.zip" + zip_path = os.path.join(self.exports_dir, zip_filename) + + # Debug logging to understand path construction + self.logger.log_info(f"DEBUG: exports_dir = {self.exports_dir}") + self.logger.log_info(f"DEBUG: zip_path = {zip_path}") + + # Ensure the exports directory exists + os.makedirs(self.exports_dir, exist_ok=True) + + # Generate plots and save as images + plots = self.generate_plots(params, 
results) + image_paths = {} + + for plot_name, plot in plots.items(): + image_filename = f"{plot_name}_{timestamp}.png" + image_path = self.save_plot_as_image( + plot, image_filename.replace(".png", "") + ) + image_paths[plot_name] = image_filename + + # Generate Markdown content + md_content = self._generate_markdown_content( + params, results, image_paths, timestamp + ) + + # Create ZIP file + # Ensure the parent directory of the ZIP file exists + zip_dir = os.path.dirname(zip_path) + os.makedirs(zip_dir, exist_ok=True) + + with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf: + # Add markdown file + md_filename = f"Water_Hammer_Analysis_{timestamp}.md" + zipf.writestr(md_filename, md_content) + + # Add image files + attachments_dir = "attachments" + for plot_name, image_filename in image_paths.items(): + actual_image_path = os.path.join(self.exports_dir, image_filename) + if os.path.exists(actual_image_path): + zipf.write( + actual_image_path, f"{attachments_dir}/{image_filename}" + ) + # Clean up individual image file - temporarily disabled for debugging + # os.remove(actual_image_path) + + self.logger.log_user_action("exported_obsidian_zip", zip_filename) + return zip_path + + except Exception as e: + self.logger.log_error(f"Error during Obsidian export: {str(e)}") + # If the file was created despite the error, return it anyway + if ( + "zip_path" in locals() + and os.path.exists(zip_path) + and os.path.getsize(zip_path) > 0 + ): + self.logger.log_info(f"Export file created successfully despite error") + return zip_path + else: + raise e + + def _generate_markdown_content(self, params, results, image_paths, timestamp): + """Generate Markdown content for Obsidian export""" + + md_content = f"""# Water Hammer Analysis Report + +**Generated on:** {datetime.now().strftime("%Y-%m-%d %H:%M:%S")} +**Project:** {self.project_name} +**Project ID:** {self.project_id} +**Execution ID:** {self.logger.execution_id} + +--- + +## System Parameters + +| Parameter 
| Value | Unit | +|-----------|-------|------| +| Pipe Length | {params['pipe_length']:.1f} | m | +| Pipe Diameter | {params['pipe_diameter']*1000:.1f} | mm | +| Wall Thickness | {params['wall_thickness']*1000:.1f} | mm | +| Flow Rate | {params['flow_rate']:.0f} | L/h | +| Pump Pressure | {params['pump_pressure']:.1f} | bar | +| Fluid Density | {params['fluid_density']:.0f} | kg/m³ | +| Fluid Temperature | {params['fluid_temperature']:.1f} | °C | +| Closure Time | {params['closure_time']:.1f} | s | +| Damper Enabled | {'Yes' if params.get('damper_enabled', False) else 'No'} | | + +### Damper Configuration +""" + + if params.get("damper_enabled", False): + md_content += f""" +| Parameter | Value | Unit | +|-----------|-------|------| +| Damper Volume | {params['damper_volume']:.1f} | L | +| Precharge Pressure | {params['damper_precharge']:.1f} | bar | +| Gas Percentage | {params['damper_gas_percentage']:.1f} | % | +| Position | {params['damper_position']:.1f} | m | +| Connection Diameter | {params['damper_connection_diameter']*1000:.1f} | mm | +""" + else: + md_content += "\n*No damper configured for this analysis.*\n" + + md_content += f""" +--- + +## Analysis Results + +### Key Parameters + +| Result | Value | Unit | +|--------|-------|------| +| Fluid Velocity | {results['parameters']['fluid_velocity']:.2f} | m/s | +| Wave Speed | {results['parameters']['wave_speed']:.1f} | m/s | +| Critical Time | {results['parameters']['critical_time']:.2f} | s | +| Joukowsky Surge | {results['parameters']['joukowsky_surge']:.1f} | bar | +| **Real Surge** | **{results['parameters']['real_surge']:.1f}** | **bar** | +| Time Factor | {results['parameters']['time_factor']:.3f} | | +| Damper Factor | {results['parameters']['damper_factor']:.3f} | | +| **Total Factor** | **{results['parameters']['total_factor']:.3f}** | | +""" + + if params.get("damper_enabled", False): + md_content += f""" +### Damper Performance + +| Metric | Value | Unit | +|--------|-------|------| +| Damper 
Efficiency | {results['parameters']['damper_efficiency']:.1f} | % | +| Effective Volume | {results['parameters']['damper_effective_volume']:.1f} | L | +""" + + md_content += f""" +--- + +## Visualization + +### Pressure Transient Analysis +![[attachments/{image_paths['pressure_plot']}]] + +*Pressure evolution at different points along the pipeline during the transient event.* + +### Flow Rate Evolution +![[attachments/{image_paths['flow_plot']}]] + +*Flow rate variation during valve closure operation.* + +### System Parameters Summary +![[attachments/{image_paths['summary_plot']}]] + +*Comprehensive summary of system parameters, pressure components, and mitigation factors.* + +--- + +## Safety Assessment + +### Risk Evaluation +""" + + surge = results["parameters"]["real_surge"] + if surge > 20: + risk_level = "🔴 **HIGH RISK**" + recommendations = [ + "Immediate action required", + "Consider hydropneumatic damper installation", + "Implement slower valve closure", + "Review system design", + ] + elif surge > 10: + risk_level = "🟡 **MEDIUM RISK**" + recommendations = [ + "Monitoring recommended", + "Consider protection measures", + "Evaluate damper installation", + ] + else: + risk_level = "🟢 **LOW RISK**" + recommendations = [ + "System within acceptable limits", + "Continue regular monitoring", + ] + + md_content += f""" +**Overall Risk Level:** {risk_level} + +### Recommendations +""" + for rec in recommendations: + md_content += f"- {rec}\n" + + if results["parameters"]["time_factor"] > 0.8: + md_content += "- Consider slower valve closure to reduce surge\n" + + if not params.get("damper_enabled", False) and surge > 5: + md_content += ( + "- Consider installing hydropneumatic damper for surge protection\n" + ) + + md_content += f""" +--- + +## Technical Notes + +### Calculation Method +- **Wave Speed:** Joukowsky formula with pipe elasticity correction +- **Pressure Surge:** Modified Joukowsky equation with time and damper factors +- **Damper Analysis:** 
Isothermal gas compression model +- **Transient Simulation:** Simplified waterhammer equations + +### Assumptions +- Isothermal process for gas compression +- Uniform pipe properties +- Incompressible fluid (except for wave propagation) +- Instantaneous valve operation (linear closure) + +### References +- Wylie, E.B. & Streeter, V.L. "Fluid Transients in Systems" +- Watters, G.Z. "Analysis and Control of Unsteady Flow in Pipelines" + +--- + +*Report generated by SIDEL Water Hammer Simulator v1.0* +*Timestamp: {timestamp}* +""" + + return md_content + + +def create_flask_app( + data_manager, + user_level, + project_id, + project_name, + port, + user_id, + execution_logger, + session_manager, + theme="light", + language="en", +): + """Create and configure Flask application""" + + # Get the directory of this script for static files + script_dir = os.path.dirname(os.path.abspath(__file__)) + + app = Flask( + __name__, + static_folder=os.path.join(script_dir, "static"), + template_folder=os.path.join(script_dir, "templates"), + ) + app.secret_key = f"hammer_sim_{project_id}_{port}" + + # Initialize SocketIO for real-time logging + socketio = SocketIO(app, cors_allowed_origins="*") + + # Initialize calculator and export manager + calculator = WaterHammerCalculator() + export_manager = ExportManager( + data_manager.data_dir, project_id, project_name, execution_logger + ) + + execution_logger.log_info("Flask application initialized") + execution_logger.log_info(f"Static folder: {app.static_folder}") + execution_logger.log_info(f"Template folder: {app.template_folder}") + + @app.route("/") + def index(): + """Main application page""" + execution_logger.log_user_action("accessed_main_page") + session_manager.update_activity() + return render_template( + "hammer_simulator.html", + user_level=user_level, + project_id=project_id, + project_name=project_name, + user_id=user_id, + execution_id=execution_logger.execution_id, + theme=theme, + language=language, + ) + + 
@app.route("/api/parameters", methods=["GET"]) + def get_parameters(): + """Get current parameters""" + execution_logger.log_user_action("requested_parameters") + session_manager.update_activity() + + saved_config = data_manager.load_config() + params = calculator.default_params.copy() + params.update(saved_config) + + execution_logger.log_info(f"Returned {len(params)} parameters") + return jsonify(params) + + @app.route("/api/parameters", methods=["POST"]) + def save_parameters(): + """Save parameters""" + params = request.get_json() + execution_logger.log_user_action( + "saved_parameters", f"Updated {len(params)} parameters" + ) + session_manager.update_activity() + + data_manager.save_config(params) + execution_logger.log_info("Parameters saved successfully") + return jsonify({"status": "success"}) + + @app.route("/api/calculate", methods=["POST"]) + def calculate(): + """Perform water hammer calculations""" + params = request.get_json() + execution_logger.log_user_action("started_calculation") + session_manager.update_activity() + + try: + # Ensure all required parameters have default values + default_params = calculator.default_params.copy() + default_params.update(params) + + # Calculate system parameters + results = calculator.calculate_system_parameters(default_params) + + # Generate transient simulation + simulation_data = calculator.simulate_transient(default_params, results) + + # Combine results + full_results = { + "parameters": results, + "simulation": simulation_data, + "timestamp": datetime.now().isoformat(), + "execution_id": execution_logger.execution_id, + "user_id": user_id, + "project_id": project_id, + } + + # Log calculation + execution_logger.log_calculation(default_params, full_results) + + # Save results + data_manager.save_results(full_results) + + execution_logger.log_user_action( + "completed_calculation", "Calculation successful" + ) + + # Emit real-time update via WebSocket + socketio.emit( + "calculation_complete", + { + 
"execution_id": execution_logger.execution_id, + "timestamp": datetime.now().isoformat(), + "status": "success", + }, + ) + + return jsonify(full_results) + + except Exception as e: + execution_logger.log_error(f"Calculation failed: {str(e)}") + execution_logger.log_user_action("calculation_failed", str(e)) + + # Emit error via WebSocket + socketio.emit( + "calculation_error", + { + "execution_id": execution_logger.execution_id, + "error": str(e), + "timestamp": datetime.now().isoformat(), + }, + ) + + return jsonify({"error": str(e)}), 500 + + @app.route("/api/presets/", methods=["GET"]) + def get_preset(preset_name): + """Get predefined parameter presets""" + presets = { + "rapid_closure": {"closure_time": 0.5, "damper_enabled": False}, + "slow_closure": {"closure_time": 10.0, "damper_enabled": False}, + "with_damper": { + "closure_time": 2.0, + "damper_enabled": True, + "damper_volume": 100.0, + "damper_precharge": 5.0, + }, + "critical_system": { + "closure_time": 0.2, + "pump_pressure": 15.0, + "flow_rate": 40000.0, + }, + } + + if preset_name in presets: + return jsonify(presets[preset_name]) + else: + return jsonify({"error": "Preset not found"}), 404 + + @app.route("/api/safety_evaluation", methods=["POST"]) + def safety_evaluation(): + """Evaluate system safety""" + results = request.get_json() + + evaluation = {"overall_risk": "low", "recommendations": [], "warnings": []} + + # Check pressure surge + surge = results["parameters"]["real_surge"] + if surge > 20: + evaluation["overall_risk"] = "high" + evaluation["warnings"].append("Extremely high pressure surge detected") + elif surge > 10: + evaluation["overall_risk"] = "medium" + evaluation["warnings"].append( + "High pressure surge - consider protection measures" + ) + + # Check closure time + if results["parameters"]["time_factor"] > 0.8: + evaluation["recommendations"].append("Consider slower valve closure") + + # Damper recommendations + if not results.get("damper_enabled", False) and surge > 5: + 
evaluation["recommendations"].append( + "Consider installing hydropneumatic damper" + ) + + return jsonify(evaluation) + + @app.route("/api/generate_diagram", methods=["POST"]) + def generate_diagram(): + """Generate system diagram using PlantUML""" + try: + data = request.get_json() + params = data.get("params", {}) + results = data.get("results", {}) + + execution_logger.log_user_action("generate_diagram_requested") + session_manager.update_activity() + + # Check if requests library is available + if not REQUESTS_AVAILABLE: + return ( + jsonify( + { + "error": "PlantUML integration not available - " + + "requests library missing" + } + ), + 500, + ) + + # Create PlantUML generator + plantuml_generator = PlantUMLGenerator() + + # Generate PlantUML code + plantuml_code = plantuml_generator.generate_system_diagram(params, results) + + # Try to render the diagram + try: + image_bytes = plantuml_generator.render_diagram(plantuml_code) + + # Convert to base64 for sending to client + image_base64 = base64.b64encode(image_bytes).decode("utf-8") + + execution_logger.log_user_action("diagram_generated_successfully") + + return jsonify( + { + "success": True, + "plantuml_code": plantuml_code, + "image_base64": image_base64, + "image_format": "png", + } + ) + + except Exception as render_error: + # If rendering fails, still return the PlantUML code + execution_logger.log_error( + f"Diagram rendering failed: {str(render_error)}" + ) + + return jsonify( + { + "success": False, + "plantuml_code": plantuml_code, + "error": f"Rendering failed: {str(render_error)}", + "fallback": True, + } + ) + + except Exception as e: + execution_logger.log_error(f"Diagram generation failed: {str(e)}") + return jsonify({"error": f"Diagram generation failed: {str(e)}"}), 500 + + @app.route("/api/export/pdf", methods=["POST"]) + def export_pdf(): + """Export analysis results to PDF""" + try: + data = request.get_json() + params = data.get("params", {}) + results = data.get("results", {}) + + 
execution_logger.log_user_action("export_pdf_requested") + session_manager.update_activity() + + if not REPORTLAB_AVAILABLE or not MATPLOTLIB_AVAILABLE: + missing_libs = [] + if not REPORTLAB_AVAILABLE: + missing_libs.append("reportlab") + if not MATPLOTLIB_AVAILABLE: + missing_libs.append("matplotlib") + + return ( + jsonify( + { + "error": f"Missing required libraries: {', '.join(missing_libs)}", + "details": "Please install: pip install reportlab matplotlib", + } + ), + 400, + ) + + pdf_path = export_manager.export_to_pdf(params, results) + pdf_filename = os.path.basename(pdf_path) + + execution_logger.log_info(f"PDF exported successfully: {pdf_filename}") + + return send_file( + pdf_path, + as_attachment=True, + download_name=pdf_filename, + mimetype="application/pdf", + ) + + except Exception as e: + execution_logger.log_error(f"PDF export failed: {str(e)}") + return jsonify({"error": str(e)}), 500 + + @app.route("/api/export/obsidian", methods=["POST"]) + def export_obsidian(): + """Export analysis results as Obsidian-style ZIP""" + try: + data = request.get_json() + params = data.get("params", {}) + results = data.get("results", {}) + + execution_logger.log_user_action("export_obsidian_requested") + session_manager.update_activity() + + if not MATPLOTLIB_AVAILABLE: + return ( + jsonify( + { + "error": "Missing required library: matplotlib", + "details": "Please install: pip install matplotlib", + } + ), + 400, + ) + + try: + zip_path = export_manager.export_to_obsidian_zip(params, results) + except Exception as export_error: + # Log the error but check if file was created anyway + execution_logger.log_error( + f"Export method threw error: {str(export_error)}" + ) + + # Try to find the most recent export file as fallback + import glob + + export_pattern = os.path.join( + export_manager.exports_dir, "hammer_analysis_obsidian_*.zip" + ) + export_files = glob.glob(export_pattern) + + if export_files: + # Get the most recent file + zip_path = max(export_files, 
key=os.path.getctime) + execution_logger.log_info( + f"Using most recent export file despite error: {os.path.basename(zip_path)}" + ) + else: + # No files found, this is a real failure + raise export_error + + zip_filename = os.path.basename(zip_path) + + # Verify the file was actually created successfully + if os.path.exists(zip_path) and os.path.getsize(zip_path) > 0: + execution_logger.log_info( + f"Obsidian ZIP exported successfully: {zip_filename}" + ) + + return send_file( + zip_path, + as_attachment=True, + download_name=zip_filename, + mimetype="application/zip", + ) + else: + execution_logger.log_error( + f"Export file not created or empty: {zip_path}" + ) + return ( + jsonify({"error": "Export file was not created successfully"}), + 500, + ) + + except Exception as e: + execution_logger.log_error(f"Obsidian export failed: {str(e)}") + return jsonify({"error": str(e)}), 500 + + @app.route("/api/export/capabilities", methods=["GET"]) + def export_capabilities(): + """Get available export capabilities""" + capabilities = { + "pdf_available": REPORTLAB_AVAILABLE and MATPLOTLIB_AVAILABLE, + "obsidian_available": MATPLOTLIB_AVAILABLE, + "missing_dependencies": [], + } + + if not MATPLOTLIB_AVAILABLE: + capabilities["missing_dependencies"].append("matplotlib") + if not REPORTLAB_AVAILABLE: + capabilities["missing_dependencies"].append("reportlab") + + return jsonify(capabilities) + + @app.route("/api/status", methods=["GET"]) + def get_status(): + """Get script status""" + execution_logger.log_user_action("requested_status") + session_manager.update_activity() + + return jsonify( + { + "status": "running", + "user_level": user_level, + "user_id": user_id, + "project_id": project_id, + "project_name": project_name, + "port": port, + "execution_id": execution_logger.execution_id, + "session_id": session_manager.session_id, + "uptime_seconds": ( + datetime.now() - session_manager.start_time + ).total_seconds(), + "timestamp": datetime.now().isoformat(), + } + ) + + 
@app.route("/api/ping", methods=["POST"]) + def ping_interface(): + """Keep interface alive (heartbeat from browser tab)""" + session_manager.update_activity() + session["last_ping"] = datetime.now().isoformat() + + # Log periodic heartbeat (less frequently to avoid log spam) + if ( + not hasattr(ping_interface, "last_log") + or (datetime.now() - ping_interface.last_log).seconds > 300 + ): # Log every 5 minutes + execution_logger.log_session_event("heartbeat_received") + ping_interface.last_log = datetime.now() + + return jsonify( + { + "status": "alive", + "session_active": not session_manager.is_expired(), + "timestamp": datetime.now().isoformat(), + } + ) + + @app.route("/api/shutdown", methods=["POST"]) + def shutdown(): + """Graceful shutdown endpoint""" + execution_logger.log_session_event("shutdown_requested") + execution_logger.log_info("Graceful shutdown initiated") + + # Cleanup session + session_manager.cleanup() + + # Finalize execution logging + execution_logger.finalize_execution(0) + + return jsonify({"status": "shutting_down"}) + + @app.route("/api/logs/current", methods=["GET"]) + def get_current_logs(): + """Get current execution logs""" + execution_logger.log_user_action("requested_current_logs") + session_manager.update_activity() + + try: + if os.path.exists(execution_logger.log_file): + with open(execution_logger.log_file, "r", encoding="utf-8") as f: + logs = f.read() + return jsonify( + { + "logs": logs, + "execution_id": execution_logger.execution_id, + "log_file": execution_logger.log_file, + } + ) + else: + return jsonify( + {"logs": "", "execution_id": execution_logger.execution_id} + ) + except Exception as e: + execution_logger.log_error(f"Error reading logs: {str(e)}") + return jsonify({"error": str(e)}), 500 + + # WebSocket events + @socketio.on("connect") + def handle_connect(): + execution_logger.log_session_event("websocket_connected") + emit("connected", {"execution_id": execution_logger.execution_id}) + + 
@socketio.on("disconnect") + def handle_disconnect(): + execution_logger.log_session_event("websocket_disconnected") + + @socketio.on("request_log_stream") + def handle_log_stream(): + """Handle request for log streaming""" + execution_logger.log_user_action("requested_log_stream") + + # Send current logs + if os.path.exists(execution_logger.log_file): + with open(execution_logger.log_file, "r", encoding="utf-8") as f: + logs = f.read() + emit( + "log_update", + {"logs": logs, "execution_id": execution_logger.execution_id}, + ) + + return app, socketio + + +def open_browser(port): + """Open browser tab after Flask server starts""" + time.sleep(1) # Wait for Flask to fully start + url = f"http://127.0.0.1:{port}" + webbrowser.open_new_tab(url) + logging.info(f"Opened browser tab: {url}") + + +def main(): + """Main function""" + # Parse arguments + args = parse_arguments() + + # Setup basic logging + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + # Initialize execution logger + execution_logger = ExecutionLogger( + args.data_dir, args.user_id, args.project_id, "hammer_simulator.py" + ) + + # Initialize session manager + session_manager = SessionManager(args.user_id, args.project_id, execution_logger) + + try: + # Initialize data manager + data_manager = DataManager(args.data_dir, args.project_id, args.project_name) + execution_logger.log_info("Data manager initialized") + + # Create Flask app with all components + app, socketio = create_flask_app( + data_manager, + args.user_level, + args.project_id, + args.project_name, + args.port, + args.user_id, + execution_logger, + session_manager, + args.theme, + args.language, + ) + + execution_logger.log_info("Flask application created successfully") + + # Note: Browser opening is now handled by ScriptsManager frontend + # to ensure it opens in the same browser window/tab context + # browser_thread = threading.Thread(target=open_browser, args=(args.port,)) 
+ # browser_thread.daemon = True + # browser_thread.start() + # execution_logger.log_info("Browser opening thread started") + + # Log startup completion + execution_logger.log_info( + f"Starting Water Hammer Simulator for project: {args.project_name}" + ) + execution_logger.log_info(f"Running on port {args.port}") + execution_logger.log_session_event("application_started") + + # Run Flask app with SocketIO + # Clear any existing Werkzeug environment variables to avoid conflicts + if "WERKZEUG_SERVER_FD" in os.environ: + del os.environ["WERKZEUG_SERVER_FD"] + if "WERKZEUG_RUN_MAIN" in os.environ: + del os.environ["WERKZEUG_RUN_MAIN"] + + execution_logger.log_info("About to start SocketIO server") + socketio.run( + app, + host="0.0.0.0", + port=args.port, + debug=False, + allow_unsafe_werkzeug=True, + use_reloader=False, # Disable reloader to avoid fd conflicts + ) + execution_logger.log_info("SocketIO server ended") + + except KeyboardInterrupt: + execution_logger.log_session_event("interrupted_by_user") + execution_logger.finalize_execution(1) + session_manager.cleanup() + except Exception as e: + execution_logger.log_error(f"Application error: {str(e)}") + execution_logger.finalize_execution(1) + session_manager.cleanup() + raise + + +if __name__ == "__main__": + main() + +# === FIN CÓDIGO DEL USUARIO === + +if __name__ == "__main__": + app.run(host="127.0.0.1", port=5201, debug=False, threaded=True)