- Added monitoring metrics for project creation success and error handling in `ProjectRepository`.
- Implemented similar monitoring for scenario creation in `ScenarioRepository`.
- Refactored `run_monte_carlo` function in `simulation.py` to include timing and success/error metrics.
- Introduced new CSS styles for headers, alerts, and navigation buttons in `main.css` and `projects.css`.
- Created a new JavaScript file for navigation logic to handle chevron buttons.
- Updated HTML templates to include new navigation buttons and improved styling for buttons.
- Added tests for reporting service and routes to ensure proper functionality and access control.
- Removed unused imports and optimized existing test files for better clarity and performance.
27 lines
897 B
Python
27 lines
897 B
Python
from __future__ import annotations
|
|
|
|
from datetime import datetime
|
|
from typing import Optional
|
|
|
|
from sqlalchemy import Column, DateTime, Float, Integer, String
|
|
from sqlalchemy.ext.declarative import declarative_base
|
|
|
|
# Shared declarative base for the ORM models defined in this module.
Base = declarative_base()
|
|
|
|
|
|
class PerformanceMetric(Base):
    """ORM model for a single recorded performance-metric sample.

    Each row stores one metric observation (name + numeric value) together
    with a timestamp, an optional JSON-encoded label set, and optional
    HTTP request context (endpoint, method, status code, duration).
    """

    __tablename__ = "performance_metrics"

    id = Column(Integer, primary_key=True, index=True)
    # Recording time; defaults to naive UTC (datetime.utcnow) at insert.
    # NOTE(review): datetime.utcnow is deprecated in Python 3.12+ — consider
    # a timezone-aware default once storage compatibility is confirmed.
    timestamp = Column(DateTime, default=datetime.utcnow, index=True)
    metric_name = Column(String, index=True)
    value = Column(Float)
    labels = Column(String)  # JSON string of labels
    # Optional HTTP request context for request-scoped metrics; all nullable
    # so non-request metrics can omit them.
    endpoint = Column(String, index=True, nullable=True)
    method = Column(String, nullable=True)
    status_code = Column(Integer, nullable=True)
    duration_seconds = Column(Float, nullable=True)

    def __repr__(self) -> str:
        # Compact debug representation: primary key plus the metric name/value.
        return f"<PerformanceMetric(id={self.id}, name={self.metric_name}, value={self.value})>"
|