feat: Enhance project and scenario creation with monitoring metrics
Some checks failed
CI / lint (push) Failing after 1m14s
CI / test (push) Has been skipped
CI / build (push) Has been skipped

- Added monitoring metrics for project creation success and error handling in `ProjectRepository`.
- Implemented similar monitoring for scenario creation in `ScenarioRepository`.
- Refactored `run_monte_carlo` function in `simulation.py` to include timing and success/error metrics.
- Introduced new CSS styles for headers, alerts, and navigation buttons in `main.css` and `projects.css`.
- Created a new JavaScript file for navigation logic to handle chevron buttons.
- Updated HTML templates to include the new navigation buttons and improved button styling.
- Added tests for reporting service and routes to ensure proper functionality and access control.
- Removed unused imports and optimized existing test files for better clarity and performance.
This commit is contained in:
2025-11-12 10:36:24 +01:00
parent f68321cd04
commit ce9c174b53
61 changed files with 2124 additions and 308 deletions

View File

@@ -1,8 +1,7 @@
from __future__ import annotations
from typing import Iterable
from prometheus_client import Counter, Histogram
from prometheus_client import Counter, Histogram, Gauge
IMPORT_DURATION = Histogram(
"calminer_import_duration_seconds",
@@ -28,6 +27,54 @@ EXPORT_TOTAL = Counter(
labelnames=("dataset", "status", "format"),
)
# General performance metrics

# Latency of HTTP requests, partitioned by method, route, and response status.
REQUEST_DURATION = Histogram(
    "calminer_request_duration_seconds",
    "Duration of HTTP requests",
    labelnames=("method", "endpoint", "status"),
)
# Total count of HTTP requests, with the same label set as REQUEST_DURATION
# so the two series can be joined in queries.
REQUEST_TOTAL = Counter(
    "calminer_request_total",
    "Count of HTTP requests",
    labelnames=("method", "endpoint", "status"),
)
# Gauge of currently open client connections (incremented/decremented elsewhere).
ACTIVE_CONNECTIONS = Gauge(
    "calminer_active_connections",
    "Number of active connections",
)
# Gauge of currently open database connections (maintained elsewhere).
DB_CONNECTIONS = Gauge(
    "calminer_db_connections",
    "Number of database connections",
)

# Business metrics

# Count of project CRUD operations, labelled by operation name and outcome.
PROJECT_OPERATIONS = Counter(
    "calminer_project_operations_total",
    "Count of project operations",
    labelnames=("operation", "status"),
)
# Count of scenario CRUD operations, labelled by operation name and outcome.
SCENARIO_OPERATIONS = Counter(
    "calminer_scenario_operations_total",
    "Count of scenario operations",
    labelnames=("operation", "status"),
)
# Count of Monte Carlo simulation runs, labelled by outcome status.
SIMULATION_RUNS = Counter(
    "calminer_simulation_runs_total",
    "Count of Monte Carlo simulation runs",
    labelnames=("status",),
)
# Wall-clock duration of Monte Carlo simulations, labelled by outcome status.
SIMULATION_DURATION = Histogram(
    "calminer_simulation_duration_seconds",
    "Duration of Monte Carlo simulations",
    labelnames=("status",),
)
def observe_import(action: str, dataset: str, status: str, seconds: float) -> None:
IMPORT_TOTAL.labels(dataset=dataset, action=action, status=status).inc()
@@ -40,3 +87,22 @@ def observe_export(dataset: str, status: str, export_format: str, seconds: float
format=export_format).inc()
EXPORT_DURATION.labels(dataset=dataset, status=status,
format=export_format).observe(seconds)
def observe_request(method: str, endpoint: str, status: int, seconds: float) -> None:
    """Record one HTTP request: bump the request counter and observe its latency.

    Args:
        method: HTTP verb of the request (e.g. "GET").
        endpoint: Route or path template the request matched.
        status: HTTP response status code.
        seconds: Wall-clock duration of the request in seconds.
    """
    # Both series share the same label set; build it once.
    labels = {"method": method, "endpoint": endpoint, "status": status}
    REQUEST_TOTAL.labels(**labels).inc()
    REQUEST_DURATION.labels(**labels).observe(seconds)
def observe_project_operation(operation: str, status: str = "success") -> None:
    """Count a single project operation with the given outcome.

    Args:
        operation: Name of the project operation (e.g. "create").
        status: Outcome label; defaults to "success".
    """
    counter = PROJECT_OPERATIONS.labels(operation=operation, status=status)
    counter.inc()
def observe_scenario_operation(operation: str, status: str = "success") -> None:
    """Count a single scenario operation with the given outcome.

    Args:
        operation: Name of the scenario operation (e.g. "create").
        status: Outcome label; defaults to "success".
    """
    counter = SCENARIO_OPERATIONS.labels(operation=operation, status=status)
    counter.inc()
def observe_simulation(status: str, duration_seconds: float) -> None:
    """Record one Monte Carlo simulation run: count it and observe its duration.

    Args:
        status: Outcome label for the run (e.g. "success" or "error").
        duration_seconds: Wall-clock duration of the simulation in seconds.
    """
    run_counter = SIMULATION_RUNS.labels(status=status)
    run_counter.inc()
    duration_histogram = SIMULATION_DURATION.labels(status=status)
    duration_histogram.observe(duration_seconds)