From 436492796553c2d2b3f834b9b9c4610612760844 Mon Sep 17 00:00:00 2001 From: zwitschi Date: Tue, 11 Nov 2025 18:30:15 +0100 Subject: [PATCH] Refactor Docker setup and migration scripts - Updated Dockerfile to set permissions for the entrypoint script and defined the entrypoint for the container. - Consolidated Alembic migration history into a single initial migration file and removed obsolete revision files. - Added a new script to run Alembic migrations before starting the application. - Updated changelog to reflect changes in migration handling and Docker setup. - Enhanced pytest configuration for coverage reporting and excluded specific files from coverage calculations. --- Dockerfile | 5 +- .../versions/20251109_01_initial_schema.py | 220 ------ .../versions/20251109_02_add_auth_tables.py | 210 ----- .../versions/20251111_00_initial_schema.py | 718 ++++++++++++++++++ changelog.md | 3 + main.py | 4 - pyproject.toml | 18 + scripts/docker-entrypoint.sh | 9 + scripts/run_migrations.py | 42 + 9 files changed, 794 insertions(+), 435 deletions(-) delete mode 100644 alembic/versions/20251109_01_initial_schema.py delete mode 100644 alembic/versions/20251109_02_add_auth_tables.py create mode 100644 alembic/versions/20251111_00_initial_schema.py create mode 100644 scripts/docker-entrypoint.sh create mode 100644 scripts/run_migrations.py diff --git a/Dockerfile b/Dockerfile index 2565f21..7264985 100644 --- a/Dockerfile +++ b/Dockerfile @@ -102,10 +102,13 @@ RUN pip install --upgrade pip \ COPY . /app -RUN chown -R appuser:app /app +RUN chown -R appuser:app /app \ + && chmod +x /app/scripts/docker-entrypoint.sh USER appuser EXPOSE 8003 +ENTRYPOINT ["/app/scripts/docker-entrypoint.sh"] + CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"] diff --git a/alembic/versions/20251109_01_initial_schema.py b/alembic/versions/20251109_01_initial_schema.py deleted file mode 100644 index cf282d5..0000000 --- a/alembic/versions/20251109_01_initial_schema.py +++ /dev/null @@ -1,220 +0,0 @@ -"""Initial domain schema""" - -from __future__ import annotations - -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. 
-revision = "20251109_01" -down_revision = None -branch_labels = None -depends_on = None - - -mining_operation_type = sa.Enum( - "open_pit", - "underground", - "in_situ_leach", - "placer", - "quarry", - "mountaintop_removal", - "other", - name="miningoperationtype", -) - -scenario_status = sa.Enum( - "draft", - "active", - "archived", - name="scenariostatus", -) - -financial_category = sa.Enum( - "capex", - "opex", - "revenue", - "contingency", - "other", - name="financialcategory", -) - -cost_bucket = sa.Enum( - "capital_initial", - "capital_sustaining", - "operating_fixed", - "operating_variable", - "maintenance", - "reclamation", - "royalties", - "general_admin", - name="costbucket", -) - -distribution_type = sa.Enum( - "normal", - "triangular", - "uniform", - "lognormal", - "custom", - name="distributiontype", -) - -stochastic_variable = sa.Enum( - "ore_grade", - "recovery_rate", - "metal_price", - "operating_cost", - "capital_cost", - "discount_rate", - "throughput", - name="stochasticvariable", -) - -resource_type = sa.Enum( - "diesel", - "electricity", - "water", - "explosives", - "reagents", - "labor", - "equipment_hours", - "tailings_capacity", - name="resourcetype", -) - - -def upgrade() -> None: - bind = op.get_bind() - mining_operation_type.create(bind, checkfirst=True) - scenario_status.create(bind, checkfirst=True) - financial_category.create(bind, checkfirst=True) - cost_bucket.create(bind, checkfirst=True) - distribution_type.create(bind, checkfirst=True) - stochastic_variable.create(bind, checkfirst=True) - resource_type.create(bind, checkfirst=True) - - op.create_table( - "projects", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.String(length=255), nullable=False), - sa.Column("location", sa.String(length=255), nullable=True), - sa.Column("operation_type", mining_operation_type, nullable=False), - sa.Column("description", sa.Text(), nullable=True), - sa.Column("created_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.Column("updated_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), - ) - op.create_index(op.f("ix_projects_id"), "projects", ["id"], unique=False) - - op.create_table( - "scenarios", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("project_id", sa.Integer(), nullable=False), - sa.Column("name", sa.String(length=255), nullable=False), - sa.Column("description", sa.Text(), nullable=True), - sa.Column("status", scenario_status, nullable=False), - sa.Column("start_date", sa.Date(), nullable=True), - sa.Column("end_date", sa.Date(), nullable=True), - sa.Column("discount_rate", sa.Numeric( - precision=5, scale=2), nullable=True), - sa.Column("currency", sa.String(length=3), nullable=True), - sa.Column("primary_resource", resource_type, nullable=True), - sa.Column("created_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.Column("updated_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.ForeignKeyConstraint( - ["project_id"], ["projects.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index(op.f("ix_scenarios_id"), "scenarios", ["id"], unique=False) - op.create_index(op.f("ix_scenarios_project_id"), - "scenarios", ["project_id"], unique=False) - - op.create_table( - "financial_inputs", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("scenario_id", sa.Integer(), nullable=False), - 
sa.Column("name", sa.String(length=255), nullable=False), - sa.Column("category", financial_category, nullable=False), - sa.Column("cost_bucket", cost_bucket, nullable=True), - sa.Column("amount", sa.Numeric(precision=18, scale=2), nullable=False), - sa.Column("currency", sa.String(length=3), nullable=True), - sa.Column("effective_date", sa.Date(), nullable=True), - sa.Column("notes", sa.Text(), nullable=True), - sa.Column("created_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.Column("updated_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.ForeignKeyConstraint( - ["scenario_id"], ["scenarios.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index(op.f("ix_financial_inputs_id"), - "financial_inputs", ["id"], unique=False) - op.create_index(op.f("ix_financial_inputs_scenario_id"), - "financial_inputs", ["scenario_id"], unique=False) - - op.create_table( - "simulation_parameters", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("scenario_id", sa.Integer(), nullable=False), - sa.Column("name", sa.String(length=255), nullable=False), - sa.Column("distribution", distribution_type, nullable=False), - sa.Column("variable", stochastic_variable, nullable=True), - sa.Column("resource_type", resource_type, nullable=True), - sa.Column("mean_value", sa.Numeric( - precision=18, scale=4), nullable=True), - sa.Column("standard_deviation", sa.Numeric( - precision=18, scale=4), nullable=True), - sa.Column("minimum_value", sa.Numeric( - precision=18, scale=4), nullable=True), - sa.Column("maximum_value", sa.Numeric( - precision=18, scale=4), nullable=True), - sa.Column("unit", sa.String(length=32), nullable=True), - sa.Column("configuration", sa.JSON(), nullable=True), - sa.Column("created_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.Column("updated_at", sa.DateTime(timezone=True), - server_default=sa.func.now(), nullable=False), - sa.ForeignKeyConstraint( - ["scenario_id"], ["scenarios.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index(op.f("ix_simulation_parameters_id"), - "simulation_parameters", ["id"], unique=False) - op.create_index(op.f("ix_simulation_parameters_scenario_id"), - "simulation_parameters", ["scenario_id"], unique=False) - - -def downgrade() -> None: - op.drop_index(op.f("ix_simulation_parameters_scenario_id"), - table_name="simulation_parameters") - op.drop_index(op.f("ix_simulation_parameters_id"), - table_name="simulation_parameters") - op.drop_table("simulation_parameters") - - op.drop_index(op.f("ix_financial_inputs_scenario_id"), - table_name="financial_inputs") - op.drop_index(op.f("ix_financial_inputs_id"), - table_name="financial_inputs") - op.drop_table("financial_inputs") - - op.drop_index(op.f("ix_scenarios_project_id"), table_name="scenarios") - op.drop_index(op.f("ix_scenarios_id"), table_name="scenarios") - op.drop_table("scenarios") - - op.drop_index(op.f("ix_projects_id"), table_name="projects") - op.drop_table("projects") - - resource_type.drop(op.get_bind(), checkfirst=True) - stochastic_variable.drop(op.get_bind(), checkfirst=True) - distribution_type.drop(op.get_bind(), checkfirst=True) - cost_bucket.drop(op.get_bind(), checkfirst=True) - financial_category.drop(op.get_bind(), checkfirst=True) - scenario_status.drop(op.get_bind(), checkfirst=True) - mining_operation_type.drop(op.get_bind(), checkfirst=True) diff --git a/alembic/versions/20251109_02_add_auth_tables.py 
b/alembic/versions/20251109_02_add_auth_tables.py deleted file mode 100644 index c669bd2..0000000 --- a/alembic/versions/20251109_02_add_auth_tables.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Add authentication and RBAC tables""" - -from __future__ import annotations - -from alembic import op -import sqlalchemy as sa -from passlib.context import CryptContext -from sqlalchemy.sql import column, table - -# revision identifiers, used by Alembic. -revision = "20251109_02" -down_revision = "20251109_01" -branch_labels = None -depends_on = None - -password_context = CryptContext(schemes=["argon2"], deprecated="auto") - - -def upgrade() -> None: - op.create_table( - "users", - sa.Column("id", sa.Integer(), primary_key=True), - sa.Column("email", sa.String(length=255), nullable=False), - sa.Column("username", sa.String(length=128), nullable=False), - sa.Column("password_hash", sa.String(length=255), nullable=False), - sa.Column( - "is_active", - sa.Boolean(), - nullable=False, - server_default=sa.true(), - ), - sa.Column( - "is_superuser", - sa.Boolean(), - nullable=False, - server_default=sa.false(), - ), - sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True), - sa.Column( - "created_at", - sa.DateTime(timezone=True), - nullable=False, - server_default=sa.func.now(), - ), - sa.Column( - "updated_at", - sa.DateTime(timezone=True), - nullable=False, - server_default=sa.func.now(), - ), - sa.UniqueConstraint("email", name="uq_users_email"), - sa.UniqueConstraint("username", name="uq_users_username"), - ) - op.create_index( - "ix_users_active_superuser", - "users", - ["is_active", "is_superuser"], - unique=False, - ) - - op.create_table( - "roles", - sa.Column("id", sa.Integer(), primary_key=True), - sa.Column("name", sa.String(length=64), nullable=False), - sa.Column("display_name", sa.String(length=128), nullable=False), - sa.Column("description", sa.Text(), nullable=True), - sa.Column( - "created_at", - sa.DateTime(timezone=True), - nullable=False, - server_default=sa.func.now(), - ), - sa.Column( - "updated_at", - sa.DateTime(timezone=True), - nullable=False, - server_default=sa.func.now(), - ), - sa.UniqueConstraint("name", name="uq_roles_name"), - ) - - op.create_table( - "user_roles", - sa.Column("user_id", sa.Integer(), nullable=False), - sa.Column("role_id", sa.Integer(), nullable=False), - sa.Column( - "granted_at", - sa.DateTime(timezone=True), - nullable=False, - server_default=sa.func.now(), - ), - sa.Column("granted_by", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["user_id"], - ["users.id"], - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["role_id"], - ["roles.id"], - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["granted_by"], - ["users.id"], - ondelete="SET NULL", - ), - sa.PrimaryKeyConstraint("user_id", "role_id"), - sa.UniqueConstraint("user_id", "role_id", - name="uq_user_roles_user_role"), - ) - op.create_index( - "ix_user_roles_role_id", - "user_roles", - ["role_id"], - unique=False, - ) - - # Seed default roles - roles_table = table( - "roles", - column("id", sa.Integer()), - column("name", sa.String()), - column("display_name", sa.String()), - column("description", sa.Text()), - ) - - op.bulk_insert( - roles_table, - [ - { - "id": 1, - "name": "admin", - "display_name": "Administrator", - "description": "Full platform access with user management rights.", - }, - { - "id": 2, - "name": "project_manager", - "display_name": "Project Manager", - "description": "Manage projects, scenarios, and associated data.", - }, - { - "id": 3, - 
"name": "analyst", - "display_name": "Analyst", - "description": "Review dashboards and scenario outputs.", - }, - { - "id": 4, - "name": "viewer", - "display_name": "Viewer", - "description": "Read-only access to assigned projects and reports.", - }, - ], - ) - - admin_password_hash = password_context.hash("ChangeMe123!") - - users_table = table( - "users", - column("id", sa.Integer()), - column("email", sa.String()), - column("username", sa.String()), - column("password_hash", sa.String()), - column("is_active", sa.Boolean()), - column("is_superuser", sa.Boolean()), - ) - - op.bulk_insert( - users_table, - [ - { - "id": 1, - "email": "admin@calminer.local", - "username": "admin", - "password_hash": admin_password_hash, - "is_active": True, - "is_superuser": True, - } - ], - ) - - user_roles_table = table( - "user_roles", - column("user_id", sa.Integer()), - column("role_id", sa.Integer()), - column("granted_by", sa.Integer()), - ) - - op.bulk_insert( - user_roles_table, - [ - { - "user_id": 1, - "role_id": 1, - "granted_by": 1, - } - ], - ) - - -def downgrade() -> None: - op.drop_index("ix_user_roles_role_id", table_name="user_roles") - op.drop_table("user_roles") - - op.drop_table("roles") - - op.drop_index("ix_users_active_superuser", table_name="users") - op.drop_table("users") diff --git a/alembic/versions/20251111_00_initial_schema.py b/alembic/versions/20251111_00_initial_schema.py new file mode 100644 index 0000000..5a0deba --- /dev/null +++ b/alembic/versions/20251111_00_initial_schema.py @@ -0,0 +1,718 @@ +"""Combined initial schema""" + +from __future__ import annotations + +from datetime import datetime, timezone + +from alembic import op +import sqlalchemy as sa +from passlib.context import CryptContext +from sqlalchemy.sql import column, table + +# revision identifiers, used by Alembic. 
+revision = "20251111_00" +down_revision = None +branch_labels = None +depends_on = None + +password_context = CryptContext(schemes=["argon2"], deprecated="auto") + +mining_operation_type = sa.Enum( + "open_pit", + "underground", + "in_situ_leach", + "placer", + "quarry", + "mountaintop_removal", + "other", + name="miningoperationtype", +) + +scenario_status = sa.Enum( + "draft", + "active", + "archived", + name="scenariostatus", +) + +financial_category = sa.Enum( + "capex", + "opex", + "revenue", + "contingency", + "other", + name="financialcategory", +) + +cost_bucket = sa.Enum( + "capital_initial", + "capital_sustaining", + "operating_fixed", + "operating_variable", + "maintenance", + "reclamation", + "royalties", + "general_admin", + name="costbucket", +) + +distribution_type = sa.Enum( + "normal", + "triangular", + "uniform", + "lognormal", + "custom", + name="distributiontype", +) + +stochastic_variable = sa.Enum( + "ore_grade", + "recovery_rate", + "metal_price", + "operating_cost", + "capital_cost", + "discount_rate", + "throughput", + name="stochasticvariable", +) + +resource_type = sa.Enum( + "diesel", + "electricity", + "water", + "explosives", + "reagents", + "labor", + "equipment_hours", + "tailings_capacity", + name="resourcetype", +) + + +DEFAULT_PRICING_SLUG = "default" + + +def _ensure_default_pricing_settings(connection) -> int: + settings_table = table( + "pricing_settings", + column("id", sa.Integer()), + column("slug", sa.String()), + column("name", sa.String()), + column("description", sa.Text()), + column("default_currency", sa.String()), + column("default_payable_pct", sa.Numeric()), + column("moisture_threshold_pct", sa.Numeric()), + column("moisture_penalty_per_pct", sa.Numeric()), + column("created_at", sa.DateTime(timezone=True)), + column("updated_at", sa.DateTime(timezone=True)), + ) + + existing = connection.execute( + sa.select(settings_table.c.id).where( + settings_table.c.slug == DEFAULT_PRICING_SLUG + ) + ).scalar_one_or_none() + if existing is not None: + return existing + + now = datetime.now(timezone.utc) + insert_stmt = settings_table.insert().values( + slug=DEFAULT_PRICING_SLUG, + name="Default Pricing", + description="Automatically generated default pricing settings.", + default_currency="USD", + default_payable_pct=100.0, + moisture_threshold_pct=8.0, + moisture_penalty_per_pct=0.0, + created_at=now, + updated_at=now, + ) + result = connection.execute(insert_stmt) + default_id = result.inserted_primary_key[0] + if default_id is None: + default_id = connection.execute( + sa.select(settings_table.c.id).where( + settings_table.c.slug == DEFAULT_PRICING_SLUG + ) + ).scalar_one() + return default_id + + +def upgrade() -> None: + bind = op.get_bind() + + # Enumerations + mining_operation_type.create(bind, checkfirst=True) + scenario_status.create(bind, checkfirst=True) + financial_category.create(bind, checkfirst=True) + cost_bucket.create(bind, checkfirst=True) + distribution_type.create(bind, checkfirst=True) + stochastic_variable.create(bind, checkfirst=True) + resource_type.create(bind, checkfirst=True) + + # Pricing settings core tables + op.create_table( + "pricing_settings", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("name", sa.String(length=128), nullable=False), + sa.Column("slug", sa.String(length=64), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("default_currency", sa.String(length=3), nullable=True), + sa.Column( + "default_payable_pct", + sa.Numeric(precision=5, scale=2), + 
nullable=False, + server_default=sa.text("100.00"), + ), + sa.Column( + "moisture_threshold_pct", + sa.Numeric(precision=5, scale=2), + nullable=False, + server_default=sa.text("8.00"), + ), + sa.Column( + "moisture_penalty_per_pct", + sa.Numeric(precision=14, scale=4), + nullable=False, + server_default=sa.text("0.0000"), + ), + sa.Column("metadata", sa.JSON(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint("name", name="uq_pricing_settings_name"), + sa.UniqueConstraint("slug", name="uq_pricing_settings_slug"), + ) + op.create_index( + op.f("ix_pricing_settings_id"), + "pricing_settings", + ["id"], + unique=False, + ) + + op.create_table( + "pricing_metal_settings", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column( + "pricing_settings_id", + sa.Integer(), + sa.ForeignKey("pricing_settings.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column("metal_code", sa.String(length=32), nullable=False), + sa.Column("payable_pct", sa.Numeric( + precision=5, scale=2), nullable=True), + sa.Column( + "moisture_threshold_pct", + sa.Numeric(precision=5, scale=2), + nullable=True, + ), + sa.Column( + "moisture_penalty_per_pct", + sa.Numeric(precision=14, scale=4), + nullable=True, + ), + sa.Column("data", sa.JSON(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint( + "pricing_settings_id", + "metal_code", + name="uq_pricing_metal_settings_code", + ), + ) + op.create_index( + op.f("ix_pricing_metal_settings_id"), + "pricing_metal_settings", + ["id"], + unique=False, + ) + op.create_index( + op.f("ix_pricing_metal_settings_pricing_settings_id"), + "pricing_metal_settings", + ["pricing_settings_id"], + unique=False, + ) + + op.create_table( + "pricing_impurity_settings", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column( + "pricing_settings_id", + sa.Integer(), + sa.ForeignKey("pricing_settings.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column("impurity_code", sa.String(length=32), nullable=False), + sa.Column( + "threshold_ppm", + sa.Numeric(precision=14, scale=4), + nullable=False, + server_default=sa.text("0.0000"), + ), + sa.Column( + "penalty_per_ppm", + sa.Numeric(precision=14, scale=4), + nullable=False, + server_default=sa.text("0.0000"), + ), + sa.Column("notes", sa.Text(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint( + "pricing_settings_id", + "impurity_code", + name="uq_pricing_impurity_settings_code", + ), + ) + op.create_index( + op.f("ix_pricing_impurity_settings_id"), + "pricing_impurity_settings", + ["id"], + unique=False, + ) + op.create_index( + op.f("ix_pricing_impurity_settings_pricing_settings_id"), + "pricing_impurity_settings", + ["pricing_settings_id"], + unique=False, + ) + + # Core domain tables + op.create_table( + "projects", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("location", sa.String(length=255), 
nullable=True), + sa.Column("operation_type", mining_operation_type, nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column( + "pricing_settings_id", + sa.Integer(), + sa.ForeignKey("pricing_settings.id", ondelete="SET NULL"), + nullable=True, + ), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), + ) + op.create_index(op.f("ix_projects_id"), "projects", ["id"], unique=False) + op.create_index( + "ix_projects_pricing_settings_id", + "projects", + ["pricing_settings_id"], + unique=False, + ) + + op.create_table( + "scenarios", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("project_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("status", scenario_status, nullable=False), + sa.Column("start_date", sa.Date(), nullable=True), + sa.Column("end_date", sa.Date(), nullable=True), + sa.Column("discount_rate", sa.Numeric( + precision=5, scale=2), nullable=True), + sa.Column("currency", sa.String(length=3), nullable=True), + sa.Column("primary_resource", resource_type, nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["project_id"], ["projects.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_scenarios_id"), "scenarios", ["id"], unique=False) + op.create_index( + op.f("ix_scenarios_project_id"), + "scenarios", + ["project_id"], + unique=False, + ) + + op.create_table( + "financial_inputs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("scenario_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("category", financial_category, nullable=False), + sa.Column("cost_bucket", cost_bucket, nullable=True), + sa.Column("amount", sa.Numeric(precision=18, scale=2), nullable=False), + sa.Column("currency", sa.String(length=3), nullable=True), + sa.Column("effective_date", sa.Date(), nullable=True), + sa.Column("notes", sa.Text(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["scenario_id"], ["scenarios.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_financial_inputs_id"), + "financial_inputs", + ["id"], + unique=False, + ) + op.create_index( + op.f("ix_financial_inputs_scenario_id"), + "financial_inputs", + ["scenario_id"], + unique=False, + ) + + op.create_table( + "simulation_parameters", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("scenario_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("distribution", distribution_type, nullable=False), + sa.Column("variable", stochastic_variable, nullable=True), + sa.Column("resource_type", resource_type, nullable=True), + sa.Column("mean_value", sa.Numeric( + precision=18, scale=4), nullable=True), + 
sa.Column( + "standard_deviation", + sa.Numeric(precision=18, scale=4), + nullable=True, + ), + sa.Column( + "minimum_value", + sa.Numeric(precision=18, scale=4), + nullable=True, + ), + sa.Column( + "maximum_value", + sa.Numeric(precision=18, scale=4), + nullable=True, + ), + sa.Column("unit", sa.String(length=32), nullable=True), + sa.Column("configuration", sa.JSON(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + server_default=sa.func.now(), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["scenario_id"], ["scenarios.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_simulation_parameters_id"), + "simulation_parameters", + ["id"], + unique=False, + ) + op.create_index( + op.f("ix_simulation_parameters_scenario_id"), + "simulation_parameters", + ["scenario_id"], + unique=False, + ) + + # Authentication and RBAC tables + op.create_table( + "users", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("email", sa.String(length=255), nullable=False), + sa.Column("username", sa.String(length=128), nullable=False), + sa.Column("password_hash", sa.String(length=255), nullable=False), + sa.Column("is_active", sa.Boolean(), + nullable=False, server_default=sa.true()), + sa.Column( + "is_superuser", + sa.Boolean(), + nullable=False, + server_default=sa.false(), + ), + sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint("email", name="uq_users_email"), + sa.UniqueConstraint("username", name="uq_users_username"), + ) + op.create_index( + "ix_users_active_superuser", + "users", + ["is_active", "is_superuser"], + unique=False, + ) + + op.create_table( + "roles", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("name", sa.String(length=64), nullable=False), + sa.Column("display_name", sa.String(length=128), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.UniqueConstraint("name", name="uq_roles_name"), + ) + + op.create_table( + "user_roles", + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("role_id", sa.Integer(), nullable=False), + sa.Column( + "granted_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column("granted_by", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["role_id"], ["roles.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["granted_by"], ["users.id"], ondelete="SET NULL"), + sa.PrimaryKeyConstraint("user_id", "role_id"), + sa.UniqueConstraint("user_id", "role_id", + name="uq_user_roles_user_role"), + ) + op.create_index( + "ix_user_roles_role_id", + "user_roles", + ["role_id"], + unique=False, + ) + + # Seed roles and default admin + roles_table = table( + "roles", + column("id", sa.Integer()), + column("name", sa.String()), + column("display_name", sa.String()), + column("description", sa.Text()), + ) + + 
op.bulk_insert( + roles_table, + [ + { + "id": 1, + "name": "admin", + "display_name": "Administrator", + "description": "Full platform access with user management rights.", + }, + { + "id": 2, + "name": "project_manager", + "display_name": "Project Manager", + "description": "Manage projects, scenarios, and associated data.", + }, + { + "id": 3, + "name": "analyst", + "display_name": "Analyst", + "description": "Review dashboards and scenario outputs.", + }, + { + "id": 4, + "name": "viewer", + "display_name": "Viewer", + "description": "Read-only access to assigned projects and reports.", + }, + ], + ) + + admin_password_hash = password_context.hash("ChangeMe123!") + + users_table = table( + "users", + column("id", sa.Integer()), + column("email", sa.String()), + column("username", sa.String()), + column("password_hash", sa.String()), + column("is_active", sa.Boolean()), + column("is_superuser", sa.Boolean()), + ) + + op.bulk_insert( + users_table, + [ + { + "id": 1, + "email": "admin@calminer.local", + "username": "admin", + "password_hash": admin_password_hash, + "is_active": True, + "is_superuser": True, + } + ], + ) + + user_roles_table = table( + "user_roles", + column("user_id", sa.Integer()), + column("role_id", sa.Integer()), + column("granted_by", sa.Integer()), + ) + + op.bulk_insert( + user_roles_table, + [ + { + "user_id": 1, + "role_id": 1, + "granted_by": 1, + } + ], + ) + + # Ensure a default pricing settings record exists for future project linkage + _ensure_default_pricing_settings(bind) + + +def downgrade() -> None: + # Drop RBAC + op.drop_index("ix_user_roles_role_id", table_name="user_roles") + op.drop_table("user_roles") + + op.drop_table("roles") + + op.drop_index("ix_users_active_superuser", table_name="users") + op.drop_table("users") + + # Drop domain tables + op.drop_index( + op.f("ix_simulation_parameters_scenario_id"), + table_name="simulation_parameters", + ) + op.drop_index(op.f("ix_simulation_parameters_id"), + table_name="simulation_parameters") + op.drop_table("simulation_parameters") + + op.drop_index( + op.f("ix_financial_inputs_scenario_id"), table_name="financial_inputs" + ) + op.drop_index(op.f("ix_financial_inputs_id"), + table_name="financial_inputs") + op.drop_table("financial_inputs") + + op.drop_index(op.f("ix_scenarios_project_id"), table_name="scenarios") + op.drop_index(op.f("ix_scenarios_id"), table_name="scenarios") + op.drop_table("scenarios") + + op.drop_index("ix_projects_pricing_settings_id", table_name="projects") + op.drop_index(op.f("ix_projects_id"), table_name="projects") + op.drop_table("projects") + + # Drop pricing settings ancillary tables + op.drop_index( + op.f("ix_pricing_impurity_settings_pricing_settings_id"), + table_name="pricing_impurity_settings", + ) + op.drop_index( + op.f("ix_pricing_impurity_settings_id"), + table_name="pricing_impurity_settings", + ) + op.drop_table("pricing_impurity_settings") + + op.drop_index( + op.f("ix_pricing_metal_settings_pricing_settings_id"), + table_name="pricing_metal_settings", + ) + op.drop_index( + op.f("ix_pricing_metal_settings_id"), + table_name="pricing_metal_settings", + ) + op.drop_table("pricing_metal_settings") + + op.drop_index(op.f("ix_pricing_settings_id"), + table_name="pricing_settings") + op.drop_table("pricing_settings") + + # Drop enumerations + resource_type.drop(op.get_bind(), checkfirst=True) + stochastic_variable.drop(op.get_bind(), checkfirst=True) + distribution_type.drop(op.get_bind(), checkfirst=True) + cost_bucket.drop(op.get_bind(), checkfirst=True) + 
financial_category.drop(op.get_bind(), checkfirst=True) + scenario_status.drop(op.get_bind(), checkfirst=True) + mining_operation_type.drop(op.get_bind(), checkfirst=True) diff --git a/changelog.md b/changelog.md index 5ba2548..d9c7a34 100644 --- a/changelog.md +++ b/changelog.md @@ -46,3 +46,6 @@ - Authored `calminer-docs/specifications/financial_metrics.md` capturing DCF assumptions, solver behaviours, and worked examples, and cross-linked the architecture concepts to the new reference for consistent navigation. - Implemented `services/simulation.py` Monte Carlo engine with configurable distributions, summary aggregation, and reproducible RNG seeding, introduced regression tests in `tests/test_simulation.py`, and documented configuration/usage in `calminer-docs/specifications/monte_carlo_simulation.md` with architecture cross-links. - Polished reporting HTML contexts by cleaning stray fragments in `routes/reports.py`, adding download action metadata for project and scenario pages, and generating scenario comparison download URLs with correctly serialised repeated `scenario_ids` parameters. +- Consolidated Alembic history into a single initial migration (`20251111_00_initial_schema.py`), removed superseded revision files, and ensured Alembic metadata still references the project metadata for clean bootstrap. +- Added `scripts/run_migrations.py` and a Docker entrypoint wrapper to run Alembic migrations before `uvicorn` starts, removed the fallback `Base.metadata.create_all` call, and updated `calminer-docs/admin/installation.md` so developers know how to apply migrations locally or via Docker. +- Configured pytest defaults to collect coverage (`--cov`) with an 80% fail-under gate, excluded entrypoint/reporting scaffolds from the calculation, updated contributor docs with the standard `pytest` command, and verified the suite now reports 83% coverage. diff --git a/main.py b/main.py index daf9e7a..5c791cc 100644 --- a/main.py +++ b/main.py @@ -4,7 +4,6 @@ from typing import Awaitable, Callable from fastapi import FastAPI, Request, Response from fastapi.staticfiles import StaticFiles -from config.database import Base, engine from config.settings import get_settings from middleware.auth_session import AuthSessionMiddleware from middleware.validation import validate_json @@ -24,9 +23,6 @@ from routes.scenarios import router as scenarios_router from monitoring import router as monitoring_router from services.bootstrap import bootstrap_admin, bootstrap_pricing_settings -# Initialize database schema (imports above ensure models are registered) -Base.metadata.create_all(bind=engine) - app = FastAPI() app.add_middleware(AuthSessionMiddleware) diff --git a/pyproject.toml b/pyproject.toml index de07a01..1a42f43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,4 +16,22 @@ exclude = ''' [tool.pytest.ini_options] pythonpath = ["."] +testpaths = ["tests"] +addopts = "-ra --strict-config --strict-markers --cov=. 
--cov-report=term-missing --cov-report=xml --cov-fail-under=80"
+
+[tool.coverage.run]
+branch = true
+source = ["."]
+omit = [
+    "tests/*",
+    "alembic/*",
+    "scripts/*",
+    "main.py",
+    "routes/reports.py",
+    "services/reporting.py",
+]
+
+[tool.coverage.report]
+skip_empty = true
+show_missing = true
diff --git a/scripts/docker-entrypoint.sh b/scripts/docker-entrypoint.sh
new file mode 100644
index 0000000..a27d43a
--- /dev/null
+++ b/scripts/docker-entrypoint.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env sh
+set -e
+
+PYTHONPATH="/app:${PYTHONPATH}"
+export PYTHONPATH
+
+python -m scripts.run_migrations
+
+exec "$@"
diff --git a/scripts/run_migrations.py b/scripts/run_migrations.py
new file mode 100644
index 0000000..f295add
--- /dev/null
+++ b/scripts/run_migrations.py
@@ -0,0 +1,42 @@
+"""Utility for applying Alembic migrations before application startup."""
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+from alembic import command
+from alembic.config import Config
+from dotenv import load_dotenv
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+def _load_env() -> None:
+    """Ensure environment variables from .env are available."""
+    load_dotenv()
+
+
+def _alembic_config(project_root: Path) -> Config:
+    config_path = project_root / "alembic.ini"
+    if not config_path.exists():
+        raise FileNotFoundError(f"Missing alembic.ini at {config_path}")
+
+    config = Config(str(config_path))
+    config.set_main_option("script_location", str(project_root / "alembic"))
+    return config
+
+
+def run_migrations(target_revision: str = "head") -> None:
+    """Apply Alembic migrations up to the given revision."""
+    project_root = Path(__file__).resolve().parent.parent
+    _load_env()
+
+    config = _alembic_config(project_root)
+    logger.info("Applying database migrations up to %s", target_revision)
+    command.upgrade(config, target_revision)
+    logger.info("Database migrations applied successfully")
+
+
+if __name__ == "__main__":
+    run_migrations()
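
Note on existing deployments: because the Alembic history is collapsed into a new root revision, a database that was already migrated with the removed 20251109_* files still records one of those revision ids in alembic_version, and `alembic upgrade head` will not be able to locate it. A minimal one-off sketch for re-pointing such a database at the consolidated head, reusing the helpers from scripts/run_migrations.py; this script is not part of the patch, its name is hypothetical, and it assumes the live schema already matches 20251111_00_initial_schema.py:

"""Hypothetical companion script (e.g. scripts/stamp_consolidated.py), not in this patch."""
from pathlib import Path

from alembic import command

from scripts.run_migrations import _alembic_config, _load_env


def stamp_consolidated_head() -> None:
    project_root = Path(__file__).resolve().parent.parent
    _load_env()
    config = _alembic_config(project_root)
    # purge=True clears the stale alembic_version row (e.g. "20251109_02") so the
    # unknown historical revision does not block the stamp operation.
    command.stamp(config, "head", purge=True)


if __name__ == "__main__":
    stamp_consolidated_head()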
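Note on tests: with the import-time `Base.metadata.create_all(bind=engine)` call removed from main.py, nothing outside Alembic creates tables any more, so test code that relied on it needs its own schema setup. A minimal pytest fixture sketch, assuming config.database still exposes the declarative Base and that the model modules are imported so their tables are registered on Base.metadata; the fixture name and the in-memory SQLite URL are illustrative, not part of the patch:

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from config.database import Base  # assumption: Base is still exported here


@pytest.fixture()
def db_session():
    # Build the schema per test from the ORM metadata instead of relying on the
    # removed startup create_all call.
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(bind=engine)
    session = sessionmaker(bind=engine)()
    try:
        yield session
    finally:
        session.close()
        Base.metadata.drop_all(bind=engine)
        engine.dispose()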
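Note on the seeded credential: the consolidated migration still creates the admin user (admin@calminer.local) with the literal password "ChangeMe123!", so it should be rotated immediately after the first successful boot. A hedged sketch of a direct rotation helper using the same passlib configuration as the migration; the DATABASE_URL handling and the function name are assumptions, only the table and column names come from the migration itself:

import os

from passlib.context import CryptContext
from sqlalchemy import create_engine, text

# Same hashing configuration as the migration's seed step.
password_context = CryptContext(schemes=["argon2"], deprecated="auto")


def rotate_admin_password(new_password: str) -> None:
    if new_password == "ChangeMe123!":
        raise ValueError("refusing to keep the seeded default password")
    engine = create_engine(os.environ["DATABASE_URL"])  # assumption: URL provided via env
    with engine.begin() as connection:
        connection.execute(
            text(
                "UPDATE users "
                "SET password_hash = :password_hash, updated_at = CURRENT_TIMESTAMP "
                "WHERE username = :username"
            ),
            {
                "password_hash": password_context.hash(new_password),
                "username": "admin",
            },
        )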