Refactor database initialization and remove Alembic migrations

- Removed legacy Alembic migration files and consolidated schema management into a new Pydantic-backed initializer (`scripts/init_db.py`).
- Updated `main.py` to ensure the new DB initializer runs on startup, maintaining idempotency.
- Adjusted session management in `config/database.py` to prevent DetachedInstanceError.
- Introduced new enums in `models/enums.py` for better organization and clarity.
- Refactored various models to utilize the new enums, improving code maintainability.
- Enhanced middleware to handle JSON validation more robustly, ensuring non-JSON requests do not trigger JSON errors.
- Added tests for middleware and enums to ensure expected behavior and consistency.
- Updated changelog to reflect significant changes and improvements.
This commit is contained in:
2025-11-12 16:29:44 +01:00
parent 9d4c807475
commit 6e466a3fd2
28 changed files with 289 additions and 1193 deletions

View File

@@ -102,13 +102,12 @@ RUN pip install --upgrade pip \
COPY . /app
RUN chown -R appuser:app /app \
&& chmod +x /app/scripts/docker-entrypoint.sh
RUN chown -R appuser:app /app
USER appuser
EXPOSE 8003
ENTRYPOINT ["/app/scripts/docker-entrypoint.sh"]
ENTRYPOINT ["uvicorn"]
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]
CMD ["main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]

View File

@@ -8,4 +8,6 @@ The system is designed to help mining companies make informed decisions by simul
## Documentation & quickstart
This repository contains only code. See detailed developer and architecture documentation in the [Docs](https://git.allucanget.biz/allucanget/calminer-docs) repository.
- Detailed developer, architecture, and operations guides live in the companion [calminer-docs](../calminer-docs/) repository.
- For a local run, create a `.env` (see `.env.example`), install requirements, then execute `python -m scripts.init_db` followed by `uvicorn main:app --reload`. The initializer is safe to rerun and seeds demo data automatically.
- To wipe and recreate the schema in development, run `CALMINER_ENV=development python -m scripts.reset_db` before invoking the initializer again.

View File

@@ -1,35 +0,0 @@
[alembic]
script_location = alembic
sqlalchemy.url = %(DATABASE_URL)s
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s

View File

@@ -1,62 +0,0 @@
from __future__ import annotations
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
from config.database import Base, DATABASE_URL
from models import * # noqa: F401,F403 - ensure models are imported for metadata registration
# this is the Alembic Config object, which provides access to the values within the .ini file.
config = context.config
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
config.set_main_option("sqlalchemy.url", DATABASE_URL)
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
def run_migrations() -> None:
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
run_migrations()

View File

@@ -1,17 +0,0 @@
"""${message}"""
revision = ${repr(revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -1,718 +0,0 @@
"""Combined initial schema"""
from __future__ import annotations
from datetime import datetime, timezone
from alembic import op
import sqlalchemy as sa
from passlib.context import CryptContext
from sqlalchemy.sql import column, table
# revision identifiers, used by Alembic.
revision = "20251111_00"
down_revision = None
branch_labels = None
depends_on = None
password_context = CryptContext(schemes=["argon2"], deprecated="auto")
mining_operation_type = sa.Enum(
"open_pit",
"underground",
"in_situ_leach",
"placer",
"quarry",
"mountaintop_removal",
"other",
name="miningoperationtype",
)
scenario_status = sa.Enum(
"draft",
"active",
"archived",
name="scenariostatus",
)
financial_category = sa.Enum(
"capex",
"opex",
"revenue",
"contingency",
"other",
name="financialcategory",
)
cost_bucket = sa.Enum(
"capital_initial",
"capital_sustaining",
"operating_fixed",
"operating_variable",
"maintenance",
"reclamation",
"royalties",
"general_admin",
name="costbucket",
)
distribution_type = sa.Enum(
"normal",
"triangular",
"uniform",
"lognormal",
"custom",
name="distributiontype",
)
stochastic_variable = sa.Enum(
"ore_grade",
"recovery_rate",
"metal_price",
"operating_cost",
"capital_cost",
"discount_rate",
"throughput",
name="stochasticvariable",
)
resource_type = sa.Enum(
"diesel",
"electricity",
"water",
"explosives",
"reagents",
"labor",
"equipment_hours",
"tailings_capacity",
name="resourcetype",
)
DEFAULT_PRICING_SLUG = "default"
def _ensure_default_pricing_settings(connection) -> int:
settings_table = table(
"pricing_settings",
column("id", sa.Integer()),
column("slug", sa.String()),
column("name", sa.String()),
column("description", sa.Text()),
column("default_currency", sa.String()),
column("default_payable_pct", sa.Numeric()),
column("moisture_threshold_pct", sa.Numeric()),
column("moisture_penalty_per_pct", sa.Numeric()),
column("created_at", sa.DateTime(timezone=True)),
column("updated_at", sa.DateTime(timezone=True)),
)
existing = connection.execute(
sa.select(settings_table.c.id).where(
settings_table.c.slug == DEFAULT_PRICING_SLUG
)
).scalar_one_or_none()
if existing is not None:
return existing
now = datetime.now(timezone.utc)
insert_stmt = settings_table.insert().values(
slug=DEFAULT_PRICING_SLUG,
name="Default Pricing",
description="Automatically generated default pricing settings.",
default_currency="USD",
default_payable_pct=100.0,
moisture_threshold_pct=8.0,
moisture_penalty_per_pct=0.0,
created_at=now,
updated_at=now,
)
result = connection.execute(insert_stmt)
default_id = result.inserted_primary_key[0]
if default_id is None:
default_id = connection.execute(
sa.select(settings_table.c.id).where(
settings_table.c.slug == DEFAULT_PRICING_SLUG
)
).scalar_one()
return default_id
def upgrade() -> None:
bind = op.get_bind()
# Enumerations
mining_operation_type.create(bind, checkfirst=True)
scenario_status.create(bind, checkfirst=True)
financial_category.create(bind, checkfirst=True)
cost_bucket.create(bind, checkfirst=True)
distribution_type.create(bind, checkfirst=True)
stochastic_variable.create(bind, checkfirst=True)
resource_type.create(bind, checkfirst=True)
# Pricing settings core tables
op.create_table(
"pricing_settings",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(length=128), nullable=False),
sa.Column("slug", sa.String(length=64), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("default_currency", sa.String(length=3), nullable=True),
sa.Column(
"default_payable_pct",
sa.Numeric(precision=5, scale=2),
nullable=False,
server_default=sa.text("100.00"),
),
sa.Column(
"moisture_threshold_pct",
sa.Numeric(precision=5, scale=2),
nullable=False,
server_default=sa.text("8.00"),
),
sa.Column(
"moisture_penalty_per_pct",
sa.Numeric(precision=14, scale=4),
nullable=False,
server_default=sa.text("0.0000"),
),
sa.Column("metadata", sa.JSON(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.UniqueConstraint("name", name="uq_pricing_settings_name"),
sa.UniqueConstraint("slug", name="uq_pricing_settings_slug"),
)
op.create_index(
op.f("ix_pricing_settings_id"),
"pricing_settings",
["id"],
unique=False,
)
op.create_table(
"pricing_metal_settings",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column(
"pricing_settings_id",
sa.Integer(),
sa.ForeignKey("pricing_settings.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column("metal_code", sa.String(length=32), nullable=False),
sa.Column("payable_pct", sa.Numeric(
precision=5, scale=2), nullable=True),
sa.Column(
"moisture_threshold_pct",
sa.Numeric(precision=5, scale=2),
nullable=True,
),
sa.Column(
"moisture_penalty_per_pct",
sa.Numeric(precision=14, scale=4),
nullable=True,
),
sa.Column("data", sa.JSON(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.UniqueConstraint(
"pricing_settings_id",
"metal_code",
name="uq_pricing_metal_settings_code",
),
)
op.create_index(
op.f("ix_pricing_metal_settings_id"),
"pricing_metal_settings",
["id"],
unique=False,
)
op.create_index(
op.f("ix_pricing_metal_settings_pricing_settings_id"),
"pricing_metal_settings",
["pricing_settings_id"],
unique=False,
)
op.create_table(
"pricing_impurity_settings",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column(
"pricing_settings_id",
sa.Integer(),
sa.ForeignKey("pricing_settings.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column("impurity_code", sa.String(length=32), nullable=False),
sa.Column(
"threshold_ppm",
sa.Numeric(precision=14, scale=4),
nullable=False,
server_default=sa.text("0.0000"),
),
sa.Column(
"penalty_per_ppm",
sa.Numeric(precision=14, scale=4),
nullable=False,
server_default=sa.text("0.0000"),
),
sa.Column("notes", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.UniqueConstraint(
"pricing_settings_id",
"impurity_code",
name="uq_pricing_impurity_settings_code",
),
)
op.create_index(
op.f("ix_pricing_impurity_settings_id"),
"pricing_impurity_settings",
["id"],
unique=False,
)
op.create_index(
op.f("ix_pricing_impurity_settings_pricing_settings_id"),
"pricing_impurity_settings",
["pricing_settings_id"],
unique=False,
)
# Core domain tables
op.create_table(
"projects",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("location", sa.String(length=255), nullable=True),
sa.Column("operation_type", mining_operation_type, nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column(
"pricing_settings_id",
sa.Integer(),
sa.ForeignKey("pricing_settings.id", ondelete="SET NULL"),
nullable=True,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
op.create_index(op.f("ix_projects_id"), "projects", ["id"], unique=False)
op.create_index(
"ix_projects_pricing_settings_id",
"projects",
["pricing_settings_id"],
unique=False,
)
op.create_table(
"scenarios",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("project_id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("status", scenario_status, nullable=False),
sa.Column("start_date", sa.Date(), nullable=True),
sa.Column("end_date", sa.Date(), nullable=True),
sa.Column("discount_rate", sa.Numeric(
precision=5, scale=2), nullable=True),
sa.Column("currency", sa.String(length=3), nullable=True),
sa.Column("primary_resource", resource_type, nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.ForeignKeyConstraint(
["project_id"], ["projects.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_scenarios_id"), "scenarios", ["id"], unique=False)
op.create_index(
op.f("ix_scenarios_project_id"),
"scenarios",
["project_id"],
unique=False,
)
op.create_table(
"financial_inputs",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("scenario_id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("category", financial_category, nullable=False),
sa.Column("cost_bucket", cost_bucket, nullable=True),
sa.Column("amount", sa.Numeric(precision=18, scale=2), nullable=False),
sa.Column("currency", sa.String(length=3), nullable=True),
sa.Column("effective_date", sa.Date(), nullable=True),
sa.Column("notes", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.ForeignKeyConstraint(
["scenario_id"], ["scenarios.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_financial_inputs_id"),
"financial_inputs",
["id"],
unique=False,
)
op.create_index(
op.f("ix_financial_inputs_scenario_id"),
"financial_inputs",
["scenario_id"],
unique=False,
)
op.create_table(
"simulation_parameters",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("scenario_id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("distribution", distribution_type, nullable=False),
sa.Column("variable", stochastic_variable, nullable=True),
sa.Column("resource_type", resource_type, nullable=True),
sa.Column("mean_value", sa.Numeric(
precision=18, scale=4), nullable=True),
sa.Column(
"standard_deviation",
sa.Numeric(precision=18, scale=4),
nullable=True,
),
sa.Column(
"minimum_value",
sa.Numeric(precision=18, scale=4),
nullable=True,
),
sa.Column(
"maximum_value",
sa.Numeric(precision=18, scale=4),
nullable=True,
),
sa.Column("unit", sa.String(length=32), nullable=True),
sa.Column("configuration", sa.JSON(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.ForeignKeyConstraint(
["scenario_id"], ["scenarios.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_simulation_parameters_id"),
"simulation_parameters",
["id"],
unique=False,
)
op.create_index(
op.f("ix_simulation_parameters_scenario_id"),
"simulation_parameters",
["scenario_id"],
unique=False,
)
# Authentication and RBAC tables
op.create_table(
"users",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("email", sa.String(length=255), nullable=False),
sa.Column("username", sa.String(length=128), nullable=False),
sa.Column("password_hash", sa.String(length=255), nullable=False),
sa.Column("is_active", sa.Boolean(),
nullable=False, server_default=sa.true()),
sa.Column(
"is_superuser",
sa.Boolean(),
nullable=False,
server_default=sa.false(),
),
sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.UniqueConstraint("email", name="uq_users_email"),
sa.UniqueConstraint("username", name="uq_users_username"),
)
op.create_index(
"ix_users_active_superuser",
"users",
["is_active", "is_superuser"],
unique=False,
)
op.create_table(
"roles",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(length=64), nullable=False),
sa.Column("display_name", sa.String(length=128), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.UniqueConstraint("name", name="uq_roles_name"),
)
op.create_table(
"user_roles",
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("role_id", sa.Integer(), nullable=False),
sa.Column(
"granted_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column("granted_by", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["role_id"], ["roles.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["granted_by"], ["users.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("user_id", "role_id"),
sa.UniqueConstraint("user_id", "role_id",
name="uq_user_roles_user_role"),
)
op.create_index(
"ix_user_roles_role_id",
"user_roles",
["role_id"],
unique=False,
)
# Seed roles and default admin
roles_table = table(
"roles",
column("id", sa.Integer()),
column("name", sa.String()),
column("display_name", sa.String()),
column("description", sa.Text()),
)
op.bulk_insert(
roles_table,
[
{
"id": 1,
"name": "admin",
"display_name": "Administrator",
"description": "Full platform access with user management rights.",
},
{
"id": 2,
"name": "project_manager",
"display_name": "Project Manager",
"description": "Manage projects, scenarios, and associated data.",
},
{
"id": 3,
"name": "analyst",
"display_name": "Analyst",
"description": "Review dashboards and scenario outputs.",
},
{
"id": 4,
"name": "viewer",
"display_name": "Viewer",
"description": "Read-only access to assigned projects and reports.",
},
],
)
admin_password_hash = password_context.hash("ChangeMe123!")
users_table = table(
"users",
column("id", sa.Integer()),
column("email", sa.String()),
column("username", sa.String()),
column("password_hash", sa.String()),
column("is_active", sa.Boolean()),
column("is_superuser", sa.Boolean()),
)
op.bulk_insert(
users_table,
[
{
"id": 1,
"email": "admin@calminer.local",
"username": "admin",
"password_hash": admin_password_hash,
"is_active": True,
"is_superuser": True,
}
],
)
user_roles_table = table(
"user_roles",
column("user_id", sa.Integer()),
column("role_id", sa.Integer()),
column("granted_by", sa.Integer()),
)
op.bulk_insert(
user_roles_table,
[
{
"user_id": 1,
"role_id": 1,
"granted_by": 1,
}
],
)
# Ensure a default pricing settings record exists for future project linkage
_ensure_default_pricing_settings(bind)
def downgrade() -> None:
# Drop RBAC
op.drop_index("ix_user_roles_role_id", table_name="user_roles")
op.drop_table("user_roles")
op.drop_table("roles")
op.drop_index("ix_users_active_superuser", table_name="users")
op.drop_table("users")
# Drop domain tables
op.drop_index(
op.f("ix_simulation_parameters_scenario_id"),
table_name="simulation_parameters",
)
op.drop_index(op.f("ix_simulation_parameters_id"),
table_name="simulation_parameters")
op.drop_table("simulation_parameters")
op.drop_index(
op.f("ix_financial_inputs_scenario_id"), table_name="financial_inputs"
)
op.drop_index(op.f("ix_financial_inputs_id"),
table_name="financial_inputs")
op.drop_table("financial_inputs")
op.drop_index(op.f("ix_scenarios_project_id"), table_name="scenarios")
op.drop_index(op.f("ix_scenarios_id"), table_name="scenarios")
op.drop_table("scenarios")
op.drop_index("ix_projects_pricing_settings_id", table_name="projects")
op.drop_index(op.f("ix_projects_id"), table_name="projects")
op.drop_table("projects")
# Drop pricing settings ancillary tables
op.drop_index(
op.f("ix_pricing_impurity_settings_pricing_settings_id"),
table_name="pricing_impurity_settings",
)
op.drop_index(
op.f("ix_pricing_impurity_settings_id"),
table_name="pricing_impurity_settings",
)
op.drop_table("pricing_impurity_settings")
op.drop_index(
op.f("ix_pricing_metal_settings_pricing_settings_id"),
table_name="pricing_metal_settings",
)
op.drop_index(
op.f("ix_pricing_metal_settings_id"),
table_name="pricing_metal_settings",
)
op.drop_table("pricing_metal_settings")
op.drop_index(op.f("ix_pricing_settings_id"),
table_name="pricing_settings")
op.drop_table("pricing_settings")
# Drop enumerations
resource_type.drop(op.get_bind(), checkfirst=True)
stochastic_variable.drop(op.get_bind(), checkfirst=True)
distribution_type.drop(op.get_bind(), checkfirst=True)
cost_bucket.drop(op.get_bind(), checkfirst=True)
financial_category.drop(op.get_bind(), checkfirst=True)
scenario_status.drop(op.get_bind(), checkfirst=True)
mining_operation_type.drop(op.get_bind(), checkfirst=True)

View File

@@ -1,38 +0,0 @@
"""Add performance_metrics table"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "20251111_01"
down_revision = "20251111_00"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"performance_metrics",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("timestamp", sa.DateTime(), nullable=True),
sa.Column("metric_name", sa.String(), nullable=True),
sa.Column("value", sa.Float(), nullable=True),
sa.Column("labels", sa.String(), nullable=True),
sa.Column("endpoint", sa.String(), nullable=True),
sa.Column("method", sa.String(), nullable=True),
sa.Column("status_code", sa.Integer(), nullable=True),
sa.Column("duration_seconds", sa.Float(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_performance_metrics_timestamp"), "performance_metrics", ["timestamp"], unique=False)
op.create_index(op.f("ix_performance_metrics_metric_name"), "performance_metrics", ["metric_name"], unique=False)
op.create_index(op.f("ix_performance_metrics_endpoint"), "performance_metrics", ["endpoint"], unique=False)
def downgrade() -> None:
op.drop_index(op.f("ix_performance_metrics_endpoint"), table_name="performance_metrics")
op.drop_index(op.f("ix_performance_metrics_metric_name"), table_name="performance_metrics")
op.drop_index(op.f("ix_performance_metrics_timestamp"), table_name="performance_metrics")
op.drop_table("performance_metrics")

View File

@@ -1,134 +0,0 @@
"""Add metadata columns to roles table"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "20251112_00_add_roles_metadata_columns"
down_revision = "20251111_01"
branch_labels = None
depends_on = None
ROLE_BACKFILL = (
("admin", "Administrator", "Full platform access with user management rights."),
(
"project_manager",
"Project Manager",
"Manage projects, scenarios, and associated data.",
),
("analyst", "Analyst", "Review dashboards and scenario outputs."),
(
"viewer",
"Viewer",
"Read-only access to assigned projects and reports.",
),
)
def upgrade() -> None:
op.add_column(
"roles",
sa.Column("display_name", sa.String(length=128), nullable=True),
)
op.add_column(
"roles",
sa.Column("description", sa.Text(), nullable=True),
)
op.add_column(
"roles",
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=True,
server_default=sa.text("timezone('UTC', now())"),
),
)
op.add_column(
"roles",
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=True,
server_default=sa.text("timezone('UTC', now())"),
),
)
connection = op.get_bind()
for name, display_name, description in ROLE_BACKFILL:
connection.execute(
sa.text(
"""
UPDATE roles
SET display_name = :display_name,
description = COALESCE(description, :description)
WHERE name = :name
AND display_name IS NULL
"""
),
{
"name": name,
"display_name": display_name,
"description": description,
},
)
connection.execute(
sa.text(
"""
UPDATE roles
SET display_name = INITCAP(REPLACE(name, '_', ' '))
WHERE display_name IS NULL
"""
)
)
connection.execute(
sa.text(
"""
UPDATE roles
SET created_at = timezone('UTC', now())
WHERE created_at IS NULL
"""
)
)
connection.execute(
sa.text(
"""
UPDATE roles
SET updated_at = timezone('UTC', now())
WHERE updated_at IS NULL
"""
)
)
op.alter_column(
"roles",
"display_name",
existing_type=sa.String(length=128),
nullable=False,
)
op.alter_column(
"roles",
"created_at",
existing_type=sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("timezone('UTC', now())"),
)
op.alter_column(
"roles",
"updated_at",
existing_type=sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("timezone('UTC', now())"),
)
def downgrade() -> None:
op.drop_column("roles", "updated_at")
op.drop_column("roles", "created_at")
op.drop_column("roles", "description")
op.drop_column("roles", "display_name")

View File

@@ -3,6 +3,7 @@
## 2025-11-12
- Switched `models/performance_metric.py` to reuse the shared declarative base from `config.database`, clearing the SQLAlchemy 2.0 `declarative_base` deprecation warning and verifying repository tests still pass.
- Replaced the Alembic migration workflow with the idempotent Pydantic-backed initializer (`scripts/init_db.py`), added a guarded reset utility (`scripts/reset_db.py`), removed migration artifacts/tooling (Alembic directory, config, Docker entrypoint), refreshed the container entrypoint to invoke `uvicorn` directly, and updated installation/architecture docs plus the README to direct developers to the new seeding/reset flow.
- Eliminated Bandit hardcoded-secret findings by replacing literal JWT tokens and passwords across auth/security tests with randomized helpers drawn from `tests/utils/security.py`, ensuring fixtures still assert expected behaviours.
- Centralized Bandit configuration in `pyproject.toml`, reran `bandit -c pyproject.toml -r calminer tests`, and verified the scan now reports zero issues.
- Updated `.github/instructions/TODO.md` and `.github/instructions/DONE.md` to reflect the completed security scan remediation workflow.
@@ -13,6 +14,7 @@
## 2025-11-11
- Collapsed legacy Alembic revisions into `alembic/versions/00_initial.py`, removed superseded migration files, and verified the consolidated schema via SQLite upgrade and Postgres version stamping.
- Implemented base URL routing to redirect unauthenticated users to login and authenticated users to dashboard.
- Added comprehensive end-to-end tests for login flow, including redirects, session handling, and error messaging for invalid/inactive accounts.
- Updated header and footer templates to consistently use `logo_big.png` image instead of text logo, with appropriate CSS styling for sizing.

View File

@@ -54,7 +54,15 @@ def _build_database_url() -> str:
DATABASE_URL = _build_database_url()
engine = create_engine(DATABASE_URL, echo=True, future=True)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Avoid expiring ORM objects on commit so that objects returned from UnitOfWork
# remain usable for the duration of the request cycle without causing
# DetachedInstanceError when accessed after the session commits.
SessionLocal = sessionmaker(
autocommit=False,
autoflush=False,
bind=engine,
expire_on_commit=False,
)
Base = declarative_base()

40
main.py
View File

@@ -17,6 +17,7 @@ from routes.reports import router as reports_router
from routes.scenarios import router as scenarios_router
from monitoring import router as monitoring_router
from services.bootstrap import bootstrap_admin, bootstrap_pricing_settings
from scripts.init_db import init_db as init_db_script
app = FastAPI()
@@ -44,6 +45,14 @@ async def ensure_admin_bootstrap() -> None:
admin_settings = settings.admin_bootstrap_settings()
pricing_metadata = settings.pricing_metadata()
try:
# Ensure DB schema/types/seeds required for bootstrapping exist.
# The initializer is idempotent and safe to run on every startup.
try:
init_db_script()
except Exception:
logger.exception(
"DB initializer failed; continuing to bootstrap (non-fatal)")
role_result, admin_result = bootstrap_admin(settings=admin_settings)
pricing_result = bootstrap_pricing_settings(metadata=pricing_metadata)
logger.info(
@@ -54,14 +63,29 @@ async def ensure_admin_bootstrap() -> None:
admin_result.password_rotated,
admin_result.roles_granted,
)
logger.info(
"Pricing settings bootstrap completed: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
pricing_result.seed.settings.slug,
pricing_result.seed.created,
pricing_result.seed.updated_fields,
pricing_result.seed.impurity_upserts,
pricing_result.projects_assigned,
)
# Avoid accessing ORM-managed attributes that may be detached outside
# of the UnitOfWork/session scope. Attempt a safe extraction and
# fall back to minimal logging if attributes are unavailable.
try:
seed = pricing_result.seed
slug = getattr(seed.settings, "slug", None) if seed and getattr(
seed, "settings", None) else None
created = getattr(seed, "created", None)
updated_fields = getattr(seed, "updated_fields", None)
impurity_upserts = getattr(seed, "impurity_upserts", None)
logger.info(
"Pricing settings bootstrap completed: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
slug,
created,
updated_fields,
impurity_upserts,
pricing_result.projects_assigned,
)
except Exception:
logger.info(
"Pricing settings bootstrap completed (partial): projects_assigned=%s",
pricing_result.projects_assigned,
)
except Exception: # pragma: no cover - defensive logging
logger.exception(
"Failed to bootstrap administrator or pricing settings")

View File

@@ -8,6 +8,7 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoin
from starlette.types import ASGIApp
from config.settings import Settings, get_settings
from sqlalchemy.orm.exc import DetachedInstanceError
from models import User
from monitoring.metrics import ACTIVE_CONNECTIONS
from services.exceptions import EntityNotFoundError
@@ -66,21 +67,42 @@ class AuthSessionMiddleware(BaseHTTPMiddleware):
resolved = self._resolve_session(request)
# Track active sessions for authenticated users
if resolved.session.user and resolved.session.user.is_active:
try:
user_active = bool(resolved.session.user and getattr(
resolved.session.user, "is_active", False))
except DetachedInstanceError:
user_active = False
if user_active:
AuthSessionMiddleware._active_sessions += 1
ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)
response: Response | None = None
try:
response = await call_next(request)
return response
finally:
# Decrement on response
if resolved.session.user and resolved.session.user.is_active:
# Always decrement the active sessions counter if we incremented it.
if user_active:
AuthSessionMiddleware._active_sessions = max(
0, AuthSessionMiddleware._active_sessions - 1)
ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)
self._apply_session(response, resolved)
# Only apply session cookies if a response was produced by downstream
# application. If an exception occurred before a response was created
# we avoid raising another error here.
import logging
if response is not None:
try:
self._apply_session(response, resolved)
except Exception:
logging.getLogger(__name__).exception(
"Failed to apply session cookies to response"
)
else:
logging.getLogger(__name__).debug(
"AuthSessionMiddleware: no response produced by downstream app (response is None)"
)
def _resolve_session(self, request: Request) -> _ResolutionResult:
settings = self._settings_provider()

View File

@@ -10,10 +10,14 @@ async def validate_json(
) -> Response:
# Only validate JSON for requests with a body
if request.method in ("POST", "PUT", "PATCH"):
try:
# attempt to parse json body
await request.json()
except Exception:
raise HTTPException(status_code=400, detail="Invalid JSON payload")
# Only attempt JSON parsing when the client indicates a JSON content type.
content_type = (request.headers.get("content-type") or "").lower()
if "json" in content_type:
try:
# attempt to parse json body
await request.json()
except Exception:
raise HTTPException(
status_code=400, detail="Invalid JSON payload")
response = await call_next(request)
return response

View File

@@ -1,14 +1,11 @@
"""Database models and shared metadata for the CalMiner domain."""
from .financial_input import FinancialCategory, FinancialInput
from .financial_input import FinancialInput
from .metadata import (
COST_BUCKET_METADATA,
RESOURCE_METADATA,
STOCHASTIC_VARIABLE_METADATA,
CostBucket,
ResourceDescriptor,
ResourceType,
StochasticVariable,
StochasticVariableDescriptor,
)
from .performance_metric import PerformanceMetric
@@ -17,9 +14,18 @@ from .pricing_settings import (
PricingMetalSettings,
PricingSettings,
)
from .project import MiningOperationType, Project
from .scenario import Scenario, ScenarioStatus
from .simulation_parameter import DistributionType, SimulationParameter
from .enums import (
CostBucket,
DistributionType,
FinancialCategory,
MiningOperationType,
ResourceType,
ScenarioStatus,
StochasticVariable,
)
from .project import Project
from .scenario import Scenario
from .simulation_parameter import SimulationParameter
from .user import Role, User, UserRole, password_context
__all__ = [

81
models/enums.py Normal file
View File

@@ -0,0 +1,81 @@
from __future__ import annotations
from enum import Enum
class MiningOperationType(str, Enum):
"""Supported mining operation categories."""
OPEN_PIT = "open_pit"
UNDERGROUND = "underground"
IN_SITU_LEACH = "in_situ_leach"
PLACER = "placer"
QUARRY = "quarry"
MOUNTAINTOP_REMOVAL = "mountaintop_removal"
OTHER = "other"
class ScenarioStatus(str, Enum):
"""Lifecycle states for project scenarios."""
DRAFT = "draft"
ACTIVE = "active"
ARCHIVED = "archived"
class FinancialCategory(str, Enum):
"""Enumeration of cost and revenue classifications."""
CAPITAL_EXPENDITURE = "capex"
OPERATING_EXPENDITURE = "opex"
REVENUE = "revenue"
CONTINGENCY = "contingency"
OTHER = "other"
class DistributionType(str, Enum):
"""Supported stochastic distribution families for simulations."""
NORMAL = "normal"
TRIANGULAR = "triangular"
UNIFORM = "uniform"
LOGNORMAL = "lognormal"
CUSTOM = "custom"
class ResourceType(str, Enum):
"""Primary consumables and resources used in mining operations."""
DIESEL = "diesel"
ELECTRICITY = "electricity"
WATER = "water"
EXPLOSIVES = "explosives"
REAGENTS = "reagents"
LABOR = "labor"
EQUIPMENT_HOURS = "equipment_hours"
TAILINGS_CAPACITY = "tailings_capacity"
class CostBucket(str, Enum):
"""Granular cost buckets aligned with project accounting."""
CAPITAL_INITIAL = "capital_initial"
CAPITAL_SUSTAINING = "capital_sustaining"
OPERATING_FIXED = "operating_fixed"
OPERATING_VARIABLE = "operating_variable"
MAINTENANCE = "maintenance"
RECLAMATION = "reclamation"
ROYALTIES = "royalties"
GENERAL_ADMIN = "general_admin"
class StochasticVariable(str, Enum):
"""Domain variables that typically require probabilistic modelling."""
ORE_GRADE = "ore_grade"
RECOVERY_RATE = "recovery_rate"
METAL_PRICE = "metal_price"
OPERATING_COST = "operating_cost"
CAPITAL_COST = "capital_cost"
DISCOUNT_RATE = "discount_rate"
THROUGHPUT = "throughput"

View File

@@ -1,7 +1,6 @@
from __future__ import annotations
from datetime import date, datetime
from enum import Enum
from typing import TYPE_CHECKING
from sqlalchemy import (
@@ -19,23 +18,13 @@ from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
from sqlalchemy.sql import func
from config.database import Base
from .metadata import CostBucket
from .enums import CostBucket, FinancialCategory
from services.currency import normalise_currency
if TYPE_CHECKING: # pragma: no cover
from .scenario import Scenario
class FinancialCategory(str, Enum):
"""Enumeration of cost and revenue classifications."""
CAPITAL_EXPENDITURE = "capex"
OPERATING_EXPENDITURE = "opex"
REVENUE = "revenue"
CONTINGENCY = "contingency"
OTHER = "other"
class FinancialInput(Base):
"""Line-item financial assumption attached to a scenario."""
@@ -47,10 +36,10 @@ class FinancialInput(Base):
)
name: Mapped[str] = mapped_column(String(255), nullable=False)
category: Mapped[FinancialCategory] = mapped_column(
SQLEnum(FinancialCategory), nullable=False
SQLEnum(FinancialCategory, name="financialcategory", create_type=False), nullable=False
)
cost_bucket: Mapped[CostBucket | None] = mapped_column(
SQLEnum(CostBucket), nullable=True
SQLEnum(CostBucket, name="costbucket", create_type=False), nullable=True
)
amount: Mapped[float] = mapped_column(Numeric(18, 2), nullable=False)
currency: Mapped[str | None] = mapped_column(String(3), nullable=True)

View File

@@ -1,45 +1,7 @@
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
class ResourceType(str, Enum):
"""Primary consumables and resources used in mining operations."""
DIESEL = "diesel"
ELECTRICITY = "electricity"
WATER = "water"
EXPLOSIVES = "explosives"
REAGENTS = "reagents"
LABOR = "labor"
EQUIPMENT_HOURS = "equipment_hours"
TAILINGS_CAPACITY = "tailings_capacity"
class CostBucket(str, Enum):
"""Granular cost buckets aligned with project accounting."""
CAPITAL_INITIAL = "capital_initial"
CAPITAL_SUSTAINING = "capital_sustaining"
OPERATING_FIXED = "operating_fixed"
OPERATING_VARIABLE = "operating_variable"
MAINTENANCE = "maintenance"
RECLAMATION = "reclamation"
ROYALTIES = "royalties"
GENERAL_ADMIN = "general_admin"
class StochasticVariable(str, Enum):
"""Domain variables that typically require probabilistic modelling."""
ORE_GRADE = "ore_grade"
RECOVERY_RATE = "recovery_rate"
METAL_PRICE = "metal_price"
OPERATING_COST = "operating_cost"
CAPITAL_COST = "capital_cost"
DISCOUNT_RATE = "discount_rate"
THROUGHPUT = "throughput"
from .enums import ResourceType, CostBucket, StochasticVariable
@dataclass(frozen=True)

View File

@@ -1,9 +1,10 @@
from __future__ import annotations
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING, List
from .enums import MiningOperationType
from sqlalchemy import DateTime, Enum as SQLEnum, ForeignKey, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func
@@ -15,16 +16,6 @@ if TYPE_CHECKING: # pragma: no cover
from .pricing_settings import PricingSettings
class MiningOperationType(str, Enum):
"""Supported mining operation categories."""
OPEN_PIT = "open_pit"
UNDERGROUND = "underground"
IN_SITU_LEACH = "in_situ_leach"
PLACER = "placer"
QUARRY = "quarry"
MOUNTAINTOP_REMOVAL = "mountaintop_removal"
OTHER = "other"
class Project(Base):
@@ -36,7 +27,9 @@ class Project(Base):
name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
location: Mapped[str | None] = mapped_column(String(255), nullable=True)
operation_type: Mapped[MiningOperationType] = mapped_column(
SQLEnum(MiningOperationType), nullable=False, default=MiningOperationType.OTHER
SQLEnum(MiningOperationType, name="miningoperationtype", create_type=False),
nullable=False,
default=MiningOperationType.OTHER,
)
description: Mapped[str | None] = mapped_column(Text, nullable=True)
pricing_settings_id: Mapped[int | None] = mapped_column(

View File

@@ -1,7 +1,6 @@
from __future__ import annotations
from datetime import date, datetime
from enum import Enum
from typing import TYPE_CHECKING, List
from sqlalchemy import (
@@ -13,13 +12,14 @@ from sqlalchemy import (
Numeric,
String,
Text,
UniqueConstraint,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
from sqlalchemy.sql import func
from config.database import Base
from services.currency import normalise_currency
from .metadata import ResourceType
from .enums import ResourceType, ScenarioStatus
if TYPE_CHECKING: # pragma: no cover
from .financial_input import FinancialInput
@@ -27,18 +27,14 @@ if TYPE_CHECKING: # pragma: no cover
from .simulation_parameter import SimulationParameter
class ScenarioStatus(str, Enum):
"""Lifecycle states for project scenarios."""
DRAFT = "draft"
ACTIVE = "active"
ARCHIVED = "archived"
class Scenario(Base):
"""A specific configuration of assumptions for a project."""
__tablename__ = "scenarios"
__table_args__ = (
UniqueConstraint("project_id", "name",
name="uq_scenarios_project_name"),
)
id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
project_id: Mapped[int] = mapped_column(
@@ -47,7 +43,9 @@ class Scenario(Base):
name: Mapped[str] = mapped_column(String(255), nullable=False)
description: Mapped[str | None] = mapped_column(Text, nullable=True)
status: Mapped[ScenarioStatus] = mapped_column(
SQLEnum(ScenarioStatus), nullable=False, default=ScenarioStatus.DRAFT
SQLEnum(ScenarioStatus, name="scenariostatus", create_type=False),
nullable=False,
default=ScenarioStatus.DRAFT,
)
start_date: Mapped[date | None] = mapped_column(Date, nullable=True)
end_date: Mapped[date | None] = mapped_column(Date, nullable=True)
@@ -55,7 +53,7 @@ class Scenario(Base):
Numeric(5, 2), nullable=True)
currency: Mapped[str | None] = mapped_column(String(3), nullable=True)
primary_resource: Mapped[ResourceType | None] = mapped_column(
SQLEnum(ResourceType), nullable=True
SQLEnum(ResourceType, name="resourcetype", create_type=False), nullable=True
)
created_at: Mapped[datetime] = mapped_column(
DateTime(timezone=True), nullable=False, server_default=func.now()

View File

@@ -1,9 +1,10 @@
from __future__ import annotations
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING
from .enums import DistributionType, ResourceType, StochasticVariable
from sqlalchemy import (
JSON,
DateTime,
@@ -17,22 +18,11 @@ from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func
from config.database import Base
from .metadata import ResourceType, StochasticVariable
if TYPE_CHECKING: # pragma: no cover
from .scenario import Scenario
class DistributionType(str, Enum):
"""Supported stochastic distribution families for simulations."""
NORMAL = "normal"
TRIANGULAR = "triangular"
UNIFORM = "uniform"
LOGNORMAL = "lognormal"
CUSTOM = "custom"
class SimulationParameter(Base):
"""Probability distribution settings for scenario simulations."""
@@ -44,13 +34,13 @@ class SimulationParameter(Base):
)
name: Mapped[str] = mapped_column(String(255), nullable=False)
distribution: Mapped[DistributionType] = mapped_column(
SQLEnum(DistributionType), nullable=False
SQLEnum(DistributionType, name="distributiontype", create_type=False), nullable=False
)
variable: Mapped[StochasticVariable | None] = mapped_column(
SQLEnum(StochasticVariable), nullable=True
SQLEnum(StochasticVariable, name="stochasticvariable", create_type=False), nullable=True
)
resource_type: Mapped[ResourceType | None] = mapped_column(
SQLEnum(ResourceType), nullable=True
SQLEnum(ResourceType, name="resourcetype", create_type=False), nullable=True
)
mean_value: Mapped[float | None] = mapped_column(
Numeric(18, 4), nullable=True)

View File

@@ -27,7 +27,6 @@ branch = true
source = ["."]
omit = [
"tests/*",
"alembic/*",
"scripts/*",
"main.py",
"routes/reports.py",
@@ -39,6 +38,6 @@ skip_empty = true
show_missing = true
[tool.bandit]
exclude_dirs = ["alembic", "scripts"]
exclude_dirs = ["scripts"]
skips = ["B101", "B601"] # B101: assert_used, B601: shell_injection (may be false positives)

View File

@@ -1,2 +1 @@
-r requirements.txt
alembic
-r requirements.txt

View File

@@ -1,9 +0,0 @@
#!/usr/bin/env sh
set -e
PYTHONPATH="/app:${PYTHONPATH}"
export PYTHONPATH
python -m scripts.run_migrations
exec "$@"

View File

@@ -1,42 +0,0 @@
"""Utility for applying Alembic migrations before application startup."""
from __future__ import annotations
import logging
from pathlib import Path
from alembic import command
from alembic.config import Config
from dotenv import load_dotenv
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def _load_env() -> None:
"""Ensure environment variables from .env are available."""
load_dotenv()
def _alembic_config(project_root: Path) -> Config:
config_path = project_root / "alembic.ini"
if not config_path.exists():
raise FileNotFoundError(f"Missing alembic.ini at {config_path}")
config = Config(str(config_path))
config.set_main_option("script_location", str(project_root / "alembic"))
return config
def run_migrations(target_revision: str = "head") -> None:
"""Apply Alembic migrations up to the given revision."""
project_root = Path(__file__).resolve().parent.parent
_load_env()
config = _alembic_config(project_root)
logger.info("Applying database migrations up to %s", target_revision)
command.upgrade(config, target_revision)
logger.info("Database migrations applied successfully")
if __name__ == "__main__":
run_migrations()

View File

@@ -162,12 +162,21 @@ def bootstrap_pricing_settings(
uow.set_project_pricing_settings(project, default_settings)
assigned += 1
logger.info(
"Pricing bootstrap result: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
seed_result.settings.slug,
seed_result.created,
seed_result.updated_fields,
seed_result.impurity_upserts,
assigned,
)
return PricingBootstrapResult(seed=seed_result, projects_assigned=assigned)
# Capture logging-safe primitives while the UnitOfWork (and session)
# are still active to avoid DetachedInstanceError when accessing ORM
# instances outside the session scope.
seed_slug = seed_result.settings.slug if seed_result and seed_result.settings else None
seed_created = getattr(seed_result, "created", None)
seed_updated_fields = getattr(seed_result, "updated_fields", None)
seed_impurity_upserts = getattr(seed_result, "impurity_upserts", None)
logger.info(
"Pricing bootstrap result: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
seed_slug,
seed_created,
seed_updated_fields,
seed_impurity_upserts,
assigned,
)
return PricingBootstrapResult(seed=seed_result, projects_assigned=assigned)

View File

@@ -27,6 +27,11 @@ from services.export_query import ProjectExportFilters, ScenarioExportFilters
from services.pricing import PricingMetadata
def _enum_value(e):
"""Return the underlying value for Enum members, otherwise return as-is."""
return getattr(e, "value", e)
class ProjectRepository:
"""Persistence operations for Project entities."""
@@ -202,7 +207,9 @@ class ScenarioRepository:
return self.session.execute(stmt).scalar_one()
def count_by_status(self, status: ScenarioStatus) -> int:
stmt = select(func.count(Scenario.id)).where(Scenario.status == status)
status_val = _enum_value(status)
stmt = select(func.count(Scenario.id)).where(
Scenario.status == status_val)
return self.session.execute(stmt).scalar_one()
def recent(self, limit: int = 5, *, with_project: bool = False) -> Sequence[Scenario]:
@@ -219,9 +226,10 @@ class ScenarioRepository:
limit: int | None = None,
with_project: bool = False,
) -> Sequence[Scenario]:
status_val = _enum_value(status)
stmt = (
select(Scenario)
.where(Scenario.status == status)
.where(Scenario.status == status_val)
.order_by(Scenario.updated_at.desc())
)
if with_project:
@@ -311,7 +319,11 @@ class ScenarioRepository:
stmt = stmt.where(Scenario.name.ilike(name_pattern))
if filters.statuses:
stmt = stmt.where(Scenario.status.in_(filters.statuses))
# Accept Enum members or raw values in filters.statuses
status_values = [
_enum_value(s) for s in (filters.statuses or [])
]
stmt = stmt.where(Scenario.status.in_(status_values))
if filters.start_date_from:
stmt = stmt.where(Scenario.start_date >=

View File

@@ -0,0 +1,30 @@
from fastapi.testclient import TestClient
from main import app
def test_login_form_post_does_not_trigger_json_error():
"""POST form-encoded data to /login and assert middleware doesn't return
the JSON "Invalid JSON payload" error which indicates the middleware
attempted to parse non-JSON bodies.
"""
client = TestClient(app)
resp = client.post(
"/login",
data={"username": "no-such-user", "password": "x"},
headers={"Accept": "text/html"},
)
content_type = resp.headers.get("content-type", "")
# If middleware raised the JSON error we'd get an application/json body
# with detail == "Invalid JSON payload". Ensure that's not the case.
if content_type.startswith("application/json"):
body = resp.json()
assert body.get("detail") != "Invalid JSON payload", (
"Middleware attempted to parse non-JSON body as JSON and failed"
)
# At minimum the endpoint should not error with the JSON payload message.
assert True

View File

@@ -0,0 +1,22 @@
from sqlalchemy import Enum as SQLEnum
from models.enums import (
MiningOperationType,
ScenarioStatus,
FinancialCategory,
DistributionType,
)
def test_enum_members_and_sql_names():
# Verify enum members exist
assert MiningOperationType.OTHER.value == "other"
assert ScenarioStatus.DRAFT.value == "draft"
assert FinancialCategory.REVENUE.value == "revenue"
assert DistributionType.NORMAL.value == "normal"
# Verify SQLAlchemy SQLEnum name mapping is consistent
assert SQLEnum(MiningOperationType, name="miningoperationtype").name == "miningoperationtype"
assert SQLEnum(ScenarioStatus, name="scenariostatus").name == "scenariostatus"
assert SQLEnum(FinancialCategory, name="financialcategory").name == "financialcategory"
assert SQLEnum(DistributionType, name="distributiontype").name == "distributiontype"