Add UI and styling documentation; remove idempotency and logging audits

- Introduced a new document outlining UI structure, reusable template components, CSS variable conventions, and per-page data/actions for the CalMiner application.
- Removed the outdated idempotency audit and logging audit documents.
- Updated quickstart guide to streamline developer setup instructions and link to relevant documentation.
- Created a roadmap document detailing scenario enhancements and data management strategies.
- Deleted the seed data plan document to consolidate information into the setup process.
- Refactored setup_database.py to improve logging and error handling during database setup and migrations; a minimal sketch of the pattern follows this list.
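
The refactor standardizes two patterns across setup_database.py: stage-tagged log messages ([CONNECT], [MIGRATE], [SEED], [VERIFY]) and re-raising driver failures as RuntimeError with a human-readable connection descriptor, chained with "raise ... from exc". The following is a minimal sketch of that pattern only; the describe_connection helper and the standalone connect function are illustrative assumptions, not the script's actual API, which keeps this logic inside the DatabaseSetup class.

import logging

import psycopg2  # same driver the setup script uses

logger = logging.getLogger(__name__)


def describe_connection(user: str, database: str) -> str:
    # Illustrative stand-in for DatabaseSetup._describe_connection (assumed name).
    return f"user={user} database={database}"


def connect(dsn: str, user: str, database: str):
    """Open a connection with a stage-tagged log and descriptive error wrapping."""
    descriptor = describe_connection(user, database)
    logger.info("[CONNECT] Validating connection (%s)", descriptor)
    try:
        conn = psycopg2.connect(dsn)
    except psycopg2.Error as exc:
        # Chain the driver error so the root cause stays visible in the traceback.
        raise RuntimeError(
            f"Unable to establish connection. Target: {descriptor}"
        ) from exc
    logger.info("[CONNECT] Connection verified (%s)", descriptor)
    return conn

Chaining with "from exc" keeps the original psycopg2 error in the traceback, so the higher-level RuntimeError can name the connection target without hiding the root cause.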
2025-10-29 13:20:44 +01:00
parent 1f58de448c
commit 04d7f202b6
19 changed files with 609 additions and 752 deletions

setup_database.py

@@ -250,7 +250,7 @@ class DatabaseSetup:
         descriptor = self._describe_connection(
             self.config.admin_user, self.config.admin_database
         )
-        logger.info("Validating admin connection (%s)", descriptor)
+        logger.info("[CONNECT] Validating admin connection (%s)", descriptor)
         try:
             with self._admin_connection(self.config.admin_database) as conn:
                 with conn.cursor() as cursor:
@@ -261,13 +261,14 @@ class DatabaseSetup:
"Check DATABASE_ADMIN_URL or DATABASE_SUPERUSER settings."
f" Target: {descriptor}"
) from exc
logger.info("Admin connection verified (%s)", descriptor)
logger.info("[CONNECT] Admin connection verified (%s)", descriptor)
def validate_application_connection(self) -> None:
descriptor = self._describe_connection(
self.config.user, self.config.database
)
logger.info("Validating application connection (%s)", descriptor)
logger.info(
"[CONNECT] Validating application connection (%s)", descriptor)
try:
with self._application_connection() as conn:
with conn.cursor() as cursor:
@@ -278,7 +279,8 @@ class DatabaseSetup:
"Ensure the role exists and credentials are correct. "
f"Target: {descriptor}"
) from exc
logger.info("Application connection verified (%s)", descriptor)
logger.info(
"[CONNECT] Application connection verified (%s)", descriptor)
def ensure_database(self) -> None:
"""Create the target database when it does not already exist."""
@@ -586,31 +588,28 @@ class DatabaseSetup:
         except RuntimeError:
             raise

+    def _connect(self, dsn: str, descriptor: str) -> PGConnection:
+        try:
+            return psycopg2.connect(dsn)
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                f"Unable to establish connection. Target: {descriptor}"
+            ) from exc
+
     def _admin_connection(self, database: Optional[str] = None) -> PGConnection:
         target_db = database or self.config.admin_database
         dsn = self.config.admin_dsn(database)
         descriptor = self._describe_connection(
             self.config.admin_user, target_db
         )
-        try:
-            return psycopg2.connect(dsn)
-        except psycopg2.Error as exc:
-            raise RuntimeError(
-                "Unable to establish admin connection. " f"Target: {descriptor}"
-            ) from exc
+        return self._connect(dsn, descriptor)

     def _application_connection(self) -> PGConnection:
         dsn = self.config.application_dsn()
         descriptor = self._describe_connection(
             self.config.user, self.config.database
         )
-        try:
-            return psycopg2.connect(dsn)
-        except psycopg2.Error as exc:
-            raise RuntimeError(
-                "Unable to establish application connection. "
-                f"Target: {descriptor}"
-            ) from exc
+        return self._connect(dsn, descriptor)

     def initialize_schema(self) -> None:
         """Create database objects from SQLAlchemy metadata if missing."""
@@ -704,63 +703,9 @@ class DatabaseSetup:
                 cursor, schema_name
             )
-            if baseline_path.exists() and baseline_name not in applied:
-                if self.dry_run:
-                    logger.info(
-                        "Dry run: baseline migration '%s' pending; would apply and mark legacy files",
-                        baseline_name,
-                    )
-                else:
-                    logger.info(
-                        "Baseline migration '%s' pending; applying and marking older migrations",
-                        baseline_name,
-                    )
-                    try:
-                        baseline_applied = self._apply_migration_file(
-                            cursor, schema_name, baseline_path
-                        )
-                    except Exception:
-                        logger.error(
-                            "Failed while applying baseline migration '%s'."
-                            " Review the migration contents and rerun with --dry-run for diagnostics.",
-                            baseline_name,
-                            exc_info=True,
-                        )
-                        raise
-                    applied.add(baseline_applied)
-                    legacy_files = [
-                        path
-                        for path in migration_files
-                        if path.name != baseline_name
-                    ]
-                    for legacy in legacy_files:
-                        if legacy.name not in applied:
-                            try:
-                                cursor.execute(
-                                    sql.SQL(
-                                        "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
-                                    ).format(
-                                        sql.Identifier(
-                                            schema_name,
-                                            MIGRATIONS_TABLE,
-                                        )
-                                    ),
-                                    (legacy.name,),
-                                )
-                            except Exception:
-                                logger.error(
-                                    "Unable to record legacy migration '%s' after baseline application."
-                                    " Check schema_migrations table in schema '%s' for partial state.",
-                                    legacy.name,
-                                    schema_name,
-                                    exc_info=True,
-                                )
-                                raise
-                            applied.add(legacy.name)
-                            logger.info(
-                                "Marked legacy migration '%s' as applied via baseline",
-                                legacy.name,
-                            )
+            self._handle_baseline_migration(
+                cursor, schema_name, baseline_path, baseline_name, migration_files, applied
+            )
             pending = [
                 path for path in migration_files if path.name not in applied
@@ -784,6 +729,85 @@ class DatabaseSetup:
logger.info("Applied %d migrations", len(pending))
def _handle_baseline_migration(
self,
cursor: extensions.cursor,
schema_name: str,
baseline_path: Path,
baseline_name: str,
migration_files: list[Path],
applied: set[str],
) -> None:
if baseline_path.exists() and baseline_name not in applied:
if self.dry_run:
logger.info(
"Dry run: baseline migration '%s' pending; would apply and mark legacy files",
baseline_name,
)
else:
logger.info(
"[MIGRATE] Baseline migration '%s' pending; applying and marking older migrations",
baseline_name,
)
try:
baseline_applied = self._apply_migration_file(
cursor, schema_name, baseline_path
)
except Exception:
logger.error(
"Failed while applying baseline migration '%s'."
" Review the migration contents and rerun with --dry-run for diagnostics.",
baseline_name,
exc_info=True,
)
raise
applied.add(baseline_applied)
self._mark_legacy_migrations_as_applied(
cursor, schema_name, migration_files, baseline_name, applied
)
def _mark_legacy_migrations_as_applied(
self,
cursor: extensions.cursor,
schema_name: str,
migration_files: list[Path],
baseline_name: str,
applied: set[str],
) -> None:
legacy_files = [
path
for path in migration_files
if path.name != baseline_name
]
for legacy in legacy_files:
if legacy.name not in applied:
try:
cursor.execute(
sql.SQL(
"INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
).format(
sql.Identifier(
schema_name,
MIGRATIONS_TABLE,
)
),
(legacy.name,),
)
except Exception:
logger.error(
"Unable to record legacy migration '%s' after baseline application."
" Check schema_migrations table in schema '%s' for partial state.",
legacy.name,
schema_name,
exc_info=True,
)
raise
applied.add(legacy.name)
logger.info(
"Marked legacy migration '%s' as applied via baseline",
legacy.name,
)
def _apply_migration_file(
self,
cursor,
@@ -847,10 +871,18 @@ class DatabaseSetup:
             dry_run=dry_run,
             verbose=0,
         )
-        seed_data.run_with_namespace(seed_args, config=self.config)
+        try:
+            seed_data.run_with_namespace(seed_args, config=self.config)
+        except Exception:
+            logger.error(
+                "[SEED] Failed during baseline data seeding. "
+                "Review seed_data.py and rerun with --dry-run for diagnostics.",
+                exc_info=True,
+            )
+            raise

         if dry_run:
-            logger.info("Dry run: skipped seed verification")
+            logger.info("[SEED] Dry run: skipped seed verification")
             return

         expected_currencies = {
@@ -896,7 +928,7 @@ class DatabaseSetup:
             raise RuntimeError(message)
         logger.info(
-            "Verified %d seeded currencies present",
+            "[VERIFY] Verified %d seeded currencies present",
             len(found_codes),
         )
@@ -918,7 +950,8 @@ class DatabaseSetup:
                 logger.error(message)
                 raise RuntimeError(message)
             else:
-                logger.info("Verified default currency 'USD' active")
+                logger.info(
+                    "[VERIFY] Verified default currency 'USD' active")

         if expected_unit_codes:
             try: