diff --git a/.gitea/workflows/test.yml b/.gitea/workflows/test.yml
index 1076056..01eacb5 100644
--- a/.gitea/workflows/test.yml
+++ b/.gitea/workflows/test.yml
@@ -3,6 +3,20 @@ on: [push]
 jobs:
   test:
+    services:
+      postgres:
+        image: postgres:16-alpine
+        env:
+          POSTGRES_DB: calminer_ci
+          POSTGRES_USER: calminer
+          POSTGRES_PASSWORD: secret
+        ports:
+          - 5432:5432
+        options: >-
+          --health-cmd "pg_isready -U calminer -d calminer_ci"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 10
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
@@ -12,7 +26,7 @@ jobs:
         with:
          python-version: "3.10"
      - name: Cache pip
-        uses: https://github.com/actions/cache@v4
+        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
@@ -22,7 +36,34 @@ jobs:
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt
+      - name: Run database setup (dry run)
+        env:
+          DATABASE_DRIVER: postgresql
+          DATABASE_HOST: 127.0.0.1
+          DATABASE_PORT: "5432"
+          DATABASE_NAME: calminer_ci
+          DATABASE_USER: calminer
+          DATABASE_PASSWORD: secret
+          DATABASE_SCHEMA: public
+          DATABASE_SUPERUSER: calminer
+          DATABASE_SUPERUSER_PASSWORD: secret
+          DATABASE_SUPERUSER_DB: calminer_ci
+        run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
+      - name: Run database setup
+        env:
+          DATABASE_DRIVER: postgresql
+          DATABASE_HOST: 127.0.0.1
+          DATABASE_PORT: "5432"
+          DATABASE_NAME: calminer_ci
+          DATABASE_USER: calminer
+          DATABASE_PASSWORD: secret
+          DATABASE_SCHEMA: public
+          DATABASE_SUPERUSER: calminer
+          DATABASE_SUPERUSER_PASSWORD: secret
+          DATABASE_SUPERUSER_DB: calminer_ci
+        run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
      - name: Run tests
        env:
-          DATABASE_URL: sqlite:///./test_calminer.db
+          DATABASE_URL: postgresql+psycopg2://calminer:secret@127.0.0.1:5432/calminer_ci
+          DATABASE_SCHEMA: public
        run: pytest
diff --git a/config/setup_test.env.example b/config/setup_test.env.example
new file mode 100644
index 0000000..bd77ec6
--- /dev/null
+++ b/config/setup_test.env.example
@@ -0,0 +1,13 @@
+# Sample environment configuration for running scripts/setup_database.py against a test instance
+DATABASE_DRIVER=postgresql
+DATABASE_HOST=192.168.88.35
+DATABASE_PORT=5432
+DATABASE_NAME=calminer_test
+DATABASE_USER=calminer_test
+DATABASE_PASSWORD=calminer_test_password
+DATABASE_SCHEMA=public
+
+# Admin connection used for provisioning database and roles
+DATABASE_SUPERUSER=postgres
+DATABASE_SUPERUSER_PASSWORD=change-me
+DATABASE_SUPERUSER_DB=postgres
diff --git a/docs/architecture/02_architecture_constraints.md b/docs/architecture/02_architecture_constraints.md
index 76e9614..d9e41d4 100644
--- a/docs/architecture/02_architecture_constraints.md
+++ b/docs/architecture/02_architecture_constraints.md
@@ -10,22 +10,58 @@ status: skeleton
 
 > e.g., choice of FastAPI, PostgreSQL, SQLAlchemy, Chart.js, Jinja2 templates.
 
+The architecture of CalMiner is influenced by several technical constraints that shape its design and implementation:
+
+1. **Framework Selection**: The choice of FastAPI as the web framework imposes constraints on how the application handles requests, routing, and middleware. FastAPI's asynchronous capabilities must be leveraged appropriately to ensure optimal performance.
+2. **Database Technology**: The use of PostgreSQL as the primary database system dictates the data modeling, querying capabilities, and transaction management strategies. SQLAlchemy ORM is used for database interactions, which requires adherence to its conventions and limitations.
+3. **Frontend Technologies**: The decision to use Jinja2 for server-side templating and Chart.js for data visualization influences the structure of the frontend code and the way dynamic content is rendered.
+4. **Simulation Logic**: The Monte Carlo simulation logic must be designed to efficiently handle large datasets and perform computations within the constraints of the chosen programming language (Python) and its libraries.
+
 ## Organizational Constraints
 
 > e.g., team skillsets, development workflows, CI/CD pipelines.
 
+Restrictions arising from organizational factors include:
+
+1. **Team Expertise**: The development team's familiarity with FastAPI, SQLAlchemy, and frontend technologies like Jinja2 and Chart.js influences the architecture choices to ensure maintainability and ease of development.
+2. **Development Processes**: The adoption of Agile methodologies and CI/CD pipelines (using Gitea Actions) shapes the architecture to support continuous integration, automated testing, and deployment practices.
+3. **Collaboration Tools**: The use of specific collaboration and version control tools (e.g., Gitea) affects how code is managed, reviewed, and integrated, impacting the overall architecture and development workflow.
+4. **Documentation Standards**: The requirement for comprehensive documentation (as seen in the `docs/` folder) necessitates an architecture that is well-structured and easy to understand for both current and future team members.
+5. **Knowledge Sharing**: The need for effective knowledge sharing and onboarding processes influences the architecture to ensure that it is accessible and understandable for new team members.
+6. **Resource Availability**: The availability of hardware, software, and human resources within the organization can impose constraints on the architecture, affecting decisions related to scalability, performance, and feature implementation.
+
 ## Regulatory Constraints
 
 > e.g., data privacy laws, industry standards.
 
+Regulatory constraints that impact the architecture of CalMiner include:
+
+1. **Data Privacy Compliance**: The architecture must ensure compliance with data privacy regulations such as GDPR or CCPA, which may dictate how user data is collected, stored, and processed.
+2. **Industry Standards**: Adherence to industry-specific standards and best practices may influence the design of data models, security measures, and reporting functionalities.
+3. **Auditability**: The system may need to incorporate logging and auditing features to meet regulatory requirements, affecting the architecture of data storage and access controls.
+4. **Data Retention Policies**: Regulatory requirements regarding data retention and deletion may impose constraints on how long certain types of data can be stored, influencing database design and data lifecycle management.
+5. **Security Standards**: Compliance with security standards (e.g., ISO/IEC 27001) may necessitate the implementation of specific security measures, such as encryption, access controls, and vulnerability management, which impact the overall architecture.
+
 ## Environmental Constraints
 
 > e.g., deployment environments, cloud provider limitations.
 
+Environmental constraints affecting the architecture include:
+
+1. **Deployment Environments**: The architecture must accommodate various deployment environments (development, testing, production) with differing configurations and resource allocations.
+2. **Cloud Provider Limitations**: If deployed on a specific cloud provider, the architecture may need to align with the provider's services, limitations, and best practices, such as using managed databases or specific container orchestration tools.
+3. **Containerization**: The use of Docker for containerization imposes constraints on how the application is packaged, deployed, and scaled, influencing the architecture to ensure compatibility with container orchestration platforms.
+4. **Scalability Requirements**: The architecture must be designed to scale efficiently based on anticipated load and usage patterns, considering the limitations of the chosen infrastructure.
+
 ## Performance Constraints
 
 > e.g., response time requirements, scalability needs.
 
+Current performance constraints include:
+
+1. **Response Time Requirements**: The architecture must ensure that the system can respond to user requests within a specified time frame, which may impact design decisions related to caching, database queries, and API performance.
+2. **Scalability Needs**: The system should be able to handle increased load and user traffic without significant degradation in performance, necessitating a scalable architecture that can grow with demand.
+
 ## Security Constraints
 
 > e.g., authentication mechanisms, data encryption standards.
diff --git a/docs/architecture/03_context_and_scope.md b/docs/architecture/03_context_and_scope.md
index 40c931d..54b2fe0 100644
--- a/docs/architecture/03_context_and_scope.md
+++ b/docs/architecture/03_context_and_scope.md
@@ -36,3 +36,22 @@ The architecture encompasses the following key areas:
 10. **Integration Points**: Interfaces for integrating with external systems and services.
 11. **Monitoring and Logging**: Systems for tracking system performance and user activity.
 12. **Maintenance and Support**: Processes for ongoing system maintenance and user support.
+
+## Diagram
+
+```mermaid
+sequenceDiagram
+    participant PM as Project Manager
+    participant DA as Data Analyst
+    participant EX as Executive
+    participant CM as CalMiner System
+
+    PM->>CM: Create and manage scenarios
+    DA->>CM: Analyze simulation results
+    EX->>CM: Review reports and dashboards
+    CM->>PM: Provide scenario planning tools
+    CM->>DA: Deliver analysis insights
+    CM->>EX: Generate high-level reports
+```
+
+This sequence diagram illustrates how the key external actors interact with the CalMiner system.
diff --git a/docs/idempotency_audit.md b/docs/idempotency_audit.md
new file mode 100644
index 0000000..6de28c3
--- /dev/null
+++ b/docs/idempotency_audit.md
@@ -0,0 +1,31 @@
+# Setup Script Idempotency Audit (2025-10-25)
+
+This note captures the current evaluation of idempotent behaviour for `scripts/setup_database.py` and outlines follow-up actions.
+
+## Admin Tasks
+
+- **ensure_database**: guarded by `SELECT 1 FROM pg_database`; re-runs safely. Failure mode: network issues or lack of privileges surface as psycopg2 errors without additional context.
+- **ensure_role**: checks `pg_roles`, creates the role if missing, and reapplies grants each time. Subsequent runs execute the grants again, but PostgreSQL tolerates repeated grants.
+- **ensure_schema**: uses an `information_schema` guard and respects `--dry-run`; idempotent when the schema is `public` or already present.
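+
+The check-before-create pattern shared by these tasks is worth illustrating. The sketch below is a minimal, hypothetical rendition of the `ensure_database` guard described above; it is not the script's actual implementation, and the function signature is illustrative only:
+
+```python
+import psycopg2
+from psycopg2 import sql
+
+
+def ensure_database(admin_dsn: str, db_name: str, dry_run: bool = False) -> None:
+    """Create db_name only when pg_database has no row for it (idempotent)."""
+    with psycopg2.connect(admin_dsn) as conn:
+        conn.autocommit = True  # CREATE DATABASE cannot run inside a transaction
+        with conn.cursor() as cur:
+            cur.execute("SELECT 1 FROM pg_database WHERE datname = %s", (db_name,))
+            if cur.fetchone() is not None:
+                return  # already present; a re-run is a no-op
+            if dry_run:
+                print(f"dry run: would create database {db_name}")
+                return
+            cur.execute(sql.SQL("CREATE DATABASE {}").format(sql.Identifier(db_name)))
+```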
+
+## Application Tasks
+
+- **initialize_schema**: relies on SQLAlchemy `create_all(checkfirst=True)`; repeatable. Dry-run output remains descriptive.
+- **run_migrations**: the new baseline workflow applies `000_base.sql` once and records the legacy scripts as applied. Subsequent runs detect the baseline in `schema_migrations` and skip reapplication.
+
+## Seeding
+
+- `seed_baseline_data` seeds currencies and measurement units with upsert logic. Verification now raises on missing data, preventing silent failures.
+- Running `--seed-data` repeatedly performs `ON CONFLICT` updates, making the operation safe.
+
+## Outstanding Risks
+
+1. The baseline migration relies on the legacy files being present when first executed; if they are removed beforehand, the old entries are never marked. (Low risk given the repository state.)
+2. `ensure_database` and `ensure_role` do not wrap SQL execution errors with additional context beyond the psycopg2 messages.
+3. Baseline verification assumes migrations and seeding run in the same process; manual runs of `scripts/seed_data.py` without the baseline could still fail.
+
+## Recommended Actions
+
+- Add regression tests ensuring repeated executions of key CLI paths (`--run-migrations`, `--seed-data`) result in no-op behaviour after the first run.
+- Extend logging/error handling for admin operations to provide clearer messages on repeated failures.
+- Consider a preflight check that warns about potential drift when the migrations directory lacks the legacy files but the baseline is still pending.
diff --git a/docs/logging_audit.md b/docs/logging_audit.md
new file mode 100644
index 0000000..ccdedc2
--- /dev/null
+++ b/docs/logging_audit.md
@@ -0,0 +1,29 @@
+# Setup Script Logging Audit (2025-10-25)
+
+The following observations capture current logging behaviour in `scripts/setup_database.py` and highlight areas requiring improved error handling and messaging.
+
+## Connection Validation
+
+- `validate_admin_connection` and `validate_application_connection` log entry/exit messages and raise `RuntimeError` with context if the connection fails. This coverage is sufficient.
+- `ensure_database` logs creation states but does not surface connection or SQL exceptions beyond the initial connection acquisition. When the inner `cursor.execute` calls fail, the exceptions bubble up without contextual logging.
+
+## Migration Runner
+
+- Lists pending migrations and logs each application attempt.
+- When the baseline is pending, the script logs whether it is a dry run or a live application and records the legacy file marking. However, if `_apply_migration_file` raises an exception, the caller re-raises after logging the failure; there is no wrapping message guiding users toward manual cleanup.
+- Legacy migration marking happens silently (info logs only). Failures during the insert into `schema_migrations` would currently propagate without added guidance.
+
+## Seeding Workflow
+
+- `seed_baseline_data` announces each seeding phase and skips verification in dry-run mode with a log breadcrumb.
+- `_verify_seeded_data` warns about missing currencies/units and inactive defaults but does **not** raise errors, meaning CI can pass while the database is incomplete. There is no explicit log when verification succeeds.
+- `_seed_units` logs when the `measurement_unit` table is missing, which is helpful, but the warning is the only feedback; no exception is raised.
+
+## Suggested Enhancements
+
+1. Wrap baseline application and legacy marking in `try/except` blocks that log actionable remediation steps before re-raising.
+2. Promote seed verification failures (missing or inactive records) to exceptions so automated workflows fail fast; add success logs for clarity.
+3. Add contextual logging around currency/measurement-unit insert failures, particularly around `execute_values` calls, to aid debugging malformed data.
+4. Introduce structured logging (log codes or phases) for major steps (`CONNECT`, `MIGRATE`, `SEED`, `VERIFY`) to make scanning log files easier.
+
+These findings inform the remaining TODO subtasks for enhanced error handling.
diff --git a/docs/migrations/consolidated_baseline_plan.md b/docs/migrations/consolidated_baseline_plan.md
new file mode 100644
index 0000000..b7cdc32
--- /dev/null
+++ b/docs/migrations/consolidated_baseline_plan.md
@@ -0,0 +1,53 @@
+# Consolidated Migration Baseline Plan
+
+This note outlines the content and structure of the planned baseline migration (`scripts/migrations/000_base.sql`). The objective is to capture the currently required schema changes in a single idempotent script so that fresh environments only need to apply one SQL file before proceeding with incremental migrations.
+
+## Guiding Principles
+
+1. **Idempotent DDL**: Every `CREATE` or `ALTER` statement must tolerate repeated execution. Use `IF NOT EXISTS` guards or existence checks (`information_schema`) where necessary.
+2. **Order of Operations**: Create reference tables first, then update dependent tables, and finally enforce foreign keys and constraints.
+3. **Data Safety**: Default data seeded by migrations should be minimal and ASCII-only to avoid encoding issues in various shells and CI logs.
+4. **Compatibility**: The baseline must reflect the schema shape expected by the current SQLAlchemy models, API routes, and seeding scripts.
+
+## Schema Elements to Include
+
+### 1. `currency` Table
+
+- Columns: `id SERIAL PRIMARY KEY`, `code VARCHAR(3) UNIQUE NOT NULL`, `name VARCHAR(128) NOT NULL`, `symbol VARCHAR(8)`, `is_active BOOLEAN NOT NULL DEFAULT TRUE`.
+- Index: implicit via the unique constraint on `code`.
+- Seed rows matching `scripts.seed_data.CURRENCY_SEEDS` (ASCII-only symbols such as `USD$`, `CAD$`).
+- Upsert logic using `ON CONFLICT (code) DO UPDATE` to keep names/symbols in sync when rerun.
+
+### 2. Currency Integration for CAPEX/OPEX
+
+- Add `currency_id INTEGER` columns with `IF NOT EXISTS` guards.
+- Populate `currency_id` from the legacy `currency_code` column if it exists.
+- Default null `currency_id` values to the USD row, then `ALTER` to `SET NOT NULL`.
+- Create `fk_capex_currency` and `fk_opex_currency` constraints with `ON DELETE RESTRICT` semantics.
+- Drop the legacy `currency_code` column if it exists (safe because the new column holds the data).
+
+### 3. Measurement Metadata on Consumption/Production
+
+- Ensure the `consumption` and `production_output` tables have `unit_name VARCHAR(64)` and `unit_symbol VARCHAR(16)` columns with `IF NOT EXISTS` guards.
+
+### 4. `measurement_unit` Reference Table
+
+- Columns: `id SERIAL PRIMARY KEY`, `code VARCHAR(64) UNIQUE NOT NULL`, `name VARCHAR(128) NOT NULL`, `symbol VARCHAR(16)`, `unit_type VARCHAR(32) NOT NULL`, `is_active BOOLEAN NOT NULL DEFAULT TRUE`, `created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()`, `updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()`.
+- A trigger to maintain `updated_at` is deferred; handle it in the application layer later and omit the trigger for now.
+- Seed rows matching `MEASUREMENT_UNIT_SEEDS` (ASCII names/symbols). Use `ON CONFLICT (code) DO UPDATE` to keep descriptive fields aligned.
+
+### 5. Transaction Handling
+
+- Wrap the main operations in a single `BEGIN; ... COMMIT;` block.
+- Use `DO $$ ... $$;` blocks only where conditional logic is required (e.g., checking column existence before a backfill).
+
+## Migration Tracking Alignment
+
+- The baseline file will be named `000_base.sql`. After execution, insert a row into `schema_migrations` with filename `000_base.sql` to keep the tracking table aligned.
+- Existing migrations (`20251021_add_currency_and_unit_fields.sql`, `20251022_create_currency_table_and_fks.sql`) remain for historical reference but will no longer be applied to new environments once the baseline is present.
+
+## Next Steps
+
+1. Draft `000_base.sql` reflecting the steps above.
+2. Update `run_migrations` to recognise the baseline file and mark older migrations as applied when the baseline exists.
+3. Provide documentation in `docs/quickstart.md` explaining how to reset an environment using the baseline plus seeds.
diff --git a/docs/quickstart.md b/docs/quickstart.md
index ebf3da8..52d81ce 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -68,13 +68,11 @@ pytest
 E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.
 
-## Migrations & Currency Backfill
+## Migrations & Baseline
 
-The project includes a referential `currency` table and migration/backfill tooling to normalize legacy currency fields.
+A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.
 
-### Run migrations and backfill (development)
-
-Configure the granular database settings in your PowerShell session before running migrations.
+Configure granular database settings in your PowerShell session before running migrations:
 
 ```powershell
 $env:DATABASE_DRIVER = 'postgresql'
@@ -84,14 +82,91 @@ $env:DATABASE_USER = 'calminer'
 $env:DATABASE_PASSWORD = 's3cret'
 $env:DATABASE_NAME = 'calminer'
 $env:DATABASE_SCHEMA = 'public'
-python scripts/run_migrations.py
-python scripts/backfill_currency.py --dry-run
-python scripts/backfill_currency.py --create-missing
+python scripts/setup_database.py --run-migrations --seed-data --dry-run
+python scripts/setup_database.py --run-migrations --seed-data
 ```
 
+The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.
+
 > ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
 
-Use `--dry-run` first to verify what will change.
+## Database bootstrap workflow
+
+Provision or refresh a database instance with `scripts/setup_database.py`. Populate the required environment variables (an example lives at `config/setup_test.env.example`) and run:
+
+```powershell
+# Load test credentials (PowerShell)
+Get-Content .\config\setup_test.env.example |
+    ForEach-Object {
+        if ($_ -and -not $_.StartsWith('#')) {
+            $name, $value = $_ -split '=', 2
+            Set-Item -Path Env:$name -Value $value
+        }
+    }
+
+# Dry-run to inspect the planned actions
+python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
+
+# Execute the full workflow
+python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
+```
+
+Typical log output confirms:
+
+- Admin and application connections succeed for the supplied credentials.
+- Database and role creation are idempotent (`already present` when rerun).
+- SQLAlchemy metadata either reports missing tables or `All tables already exist`.
+- Migrations list pending files and finish with `Applied N migrations` (a new database reports `Applied 1 migrations` for `000_base.sql`).
+
+After a successful run the target database contains all application tables plus `schema_migrations`, and that table records each applied migration file. New installations only record `000_base.sql`; upgraded environments retain historical entries alongside the baseline.
+
+### Seeding reference data
+
+`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:
+
+```powershell
+python scripts/seed_data.py --currencies --units --dry-run
+python scripts/seed_data.py --currencies --units
+```
+
+The seeder upserts the canonical currency catalog (`USD`, `EUR`, `CLP`, `RMB`, `GBP`, `CAD`, `AUD`) using ASCII-safe symbols (`USD$`, `EUR`, etc.) and the measurement units referenced by the UI (`tonnes`, `kilograms`, `pounds`, `liters`, `cubic_meters`, `kilowatt_hours`). The setup script invokes the same seeder when `--seed-data` is provided and verifies the expected rows afterward, warning if any are missing or inactive.
+
+### Rollback guidance
+
+`scripts/setup_database.py` now tracks compensating actions when it creates the database or application role. If a later step fails, the script replays those rollback actions (dropping the newly created database or role and revoking grants) before exiting. Dry runs never register rollback steps and remain read-only.
+
+If the script reports that some rollback steps could not complete—for example because a connection cannot be established—rerun the script with `--dry-run` to confirm the desired end state and then apply the outstanding cleanup manually:
+
+```powershell
+python scripts/setup_database.py --ensure-database --ensure-role --dry-run -v
+
+# Manual cleanup examples when automation cannot connect
+psql -d postgres -c "DROP DATABASE IF EXISTS calminer"
+psql -d postgres -c "DROP ROLE IF EXISTS calminer"
+```
+
+After a failure and rollback, rerun the full setup once the environment issues are resolved.
+
+### CI pipeline environment
+
+The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container and runs the setup script twice: once with `--dry-run` to validate the plan and again without it to apply migrations and seeds. No external secrets are required; the workflow sets the following environment variables for both invocations and for pytest:
+
+| Variable | Value | Purpose |
+| --- | --- | --- |
+| `DATABASE_DRIVER` | `postgresql` | Signals the driver to the setup script |
+| `DATABASE_HOST` | `127.0.0.1` | Points to the linked job service |
+| `DATABASE_PORT` | `5432` | Default service port |
+| `DATABASE_NAME` | `calminer_ci` | Target database created by the workflow |
+| `DATABASE_USER` | `calminer` | Application role used during tests |
+| `DATABASE_PASSWORD` | `secret` | Password for both admin and app role |
+| `DATABASE_SCHEMA` | `public` | Default schema for the tests |
+| `DATABASE_SUPERUSER` | `calminer` | Setup script uses the same role for admin actions |
+| `DATABASE_SUPERUSER_PASSWORD` | `secret` | Matches the Postgres service password |
+| `DATABASE_SUPERUSER_DB` | `calminer_ci` | Database to connect to for admin operations |
+
+The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally.
+
+Because the workflow provisions everything inline, no repository or organization secrets need to be configured for basic CI runs. If you later move the setup step to staging or production pipelines, replace these inline values with secrets managed by the CI platform.
 
 ## Database Objects
diff --git a/docs/seed_data_plan.md b/docs/seed_data_plan.md
new file mode 100644
index 0000000..9fec67f
--- /dev/null
+++ b/docs/seed_data_plan.md
@@ -0,0 +1,78 @@
+# Baseline Seed Data Plan
+
+This document captures the datasets that should be present in a fresh CalMiner installation and the structure required to manage them through `scripts/seed_data.py`.
+
+## Currency Catalog
+
+The `currency` table already exists and is seeded today via `scripts/seed_data.py`. The goal is to keep the canonical list in one place and ensure the default currency (USD) is always active.
+
+| Code | Name              | Symbol     | Notes                                      |
+| ---- | ----------------- | ---------- | ------------------------------------------ |
+| USD  | US Dollar         | $          | Default currency (`DEFAULT_CURRENCY_CODE`) |
+| EUR  | Euro              | EUR symbol |                                            |
+| CLP  | Chilean Peso      | $          |                                            |
+| RMB  | Chinese Yuan      | RMB symbol |                                            |
+| GBP  | British Pound     | GBP symbol |                                            |
+| CAD  | Canadian Dollar   | $          |                                            |
+| AUD  | Australian Dollar | $          |                                            |
+
+Seeding behaviour:
+
+- Upsert by ISO code; keep the existing name/symbol when updated manually.
+- Ensure `is_active` remains true for USD and defaults to true for new rows.
+- Defer to runtime validation in `routes.currencies` for enforcing default behaviour.
+
+## Measurement Units
+
+UI routes (`routes/ui.py`) currently rely on the in-memory `MEASUREMENT_UNITS` list to populate dropdowns for consumption and production forms. To make this configurable and available to the API, introduce a dedicated `measurement_unit` table and seed it.
+
+Proposed schema:
+
+| Column     | Type            | Notes                                  |
+| ---------- | --------------- | -------------------------------------- |
+| id         | SERIAL / BIGINT | Primary key.                           |
+| code       | TEXT            | Stable slug (e.g. `tonnes`). Unique.   |
+| name       | TEXT            | Display label.                         |
+| symbol     | TEXT            | Short symbol (nullable).               |
+| unit_type  | TEXT            | Category (`mass`, `volume`, `energy`). |
+| is_active  | BOOLEAN         | Default `true` for soft disabling.     |
+| created_at | TIMESTAMP       | Optional `NOW()` default.              |
+| updated_at | TIMESTAMP       | Optional `NOW()` trigger/default.      |
+
+Initial seed set (mirrors the existing UI list plus type categorisation):
+
+| Code           | Name           | Symbol | Unit Type |
+| -------------- | -------------- | ------ | --------- |
+| tonnes         | Tonnes         | t      | mass      |
+| kilograms      | Kilograms      | kg     | mass      |
+| pounds         | Pounds         | lb     | mass      |
+| liters         | Liters         | L      | volume    |
+| cubic_meters   | Cubic Meters   | m3     | volume    |
+| kilowatt_hours | Kilowatt Hours | kWh    | energy    |
+
+Seeding behaviour:
+
+- Upsert rows by `code`.
+- Preserve `unit_type` and `symbol` unless explicitly changed via administration tooling.
+- Continue surfacing unit options to the UI by querying this table instead of the static constant.
+
+## Default Settings
+
+The application expects certain defaults to exist:
+
+- **Default currency**: enforced by `routes.currencies._ensure_default_currency`; ensure seeds keep USD active.
+- **Fallback measurement unit**: the UI currently auto-selects the first option in the list. Once units move to the database, expose an application setting to choose a fallback (future work tracked under "Application Settings management").
+
+## Seeding Structure Updates
+
+To support the datasets above:
+
+1. Extend `scripts/seed_data.py` with a `SeedDataset` registry so each dataset (currencies, units, future defaults) can declare its loader/upsert function and optional dependencies (see the sketch after this plan).
+2. Add a `--dataset` CLI selector for targeted seeding while keeping `--all` as the default for `setup_database.py` integrations.
+3. Update `scripts/setup_database.py` to:
+   - Run the migration ensuring the `measurement_unit` table exists.
+   - Execute the unit seeder after currencies when `--seed-data` is supplied.
+   - Verify post-seed counts, logging which datasets were inserted/updated.
+4. Adjust UI routes to load measurement units from the database and remove the hard-coded list once the table is available.
+
+This plan aligns with the TODO item for seeding initial data and lays the groundwork for consolidating migrations around a single baseline file that introduces both the schema and seed data in an idempotent manner.
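+
+As a rough illustration of items 1 and 2 above, the registry could look like the sketch below. This is a hypothetical design aid, not code that exists in the repository; the names `SeedDataset`, `REGISTRY`, and `resolve` are assumptions:
+
+```python
+from dataclasses import dataclass, field
+from typing import Callable, Sequence
+
+
+@dataclass(frozen=True)
+class SeedDataset:
+    name: str                           # CLI identifier, e.g. --dataset currencies
+    seeder: Callable[..., None]         # upsert function receiving a cursor
+    depends_on: Sequence[str] = field(default_factory=tuple)
+
+
+REGISTRY: dict[str, SeedDataset] = {}
+
+
+def register(dataset: SeedDataset) -> None:
+    REGISTRY[dataset.name] = dataset
+
+
+def resolve(names: Sequence[str]) -> list[SeedDataset]:
+    """Expand requested datasets with their dependencies, dependencies first.
+
+    Assumes the dependency graph is acyclic.
+    """
+    ordered: list[SeedDataset] = []
+
+    def visit(name: str) -> None:
+        dataset = REGISTRY[name]
+        for dep in dataset.depends_on:
+            visit(dep)
+        if dataset not in ordered:
+            ordered.append(dataset)
+
+    for name in names:
+        visit(name)
+    return ordered
+```
+
+With such a registry, `setup_database.py --seed-data` could resolve an `--all` selection through the same code path instead of hard-coding the order of the currency and unit seeders.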
diff --git a/main.py b/main.py
index ad006b4..f6a4d06 100644
--- a/main.py
+++ b/main.py
@@ -13,6 +13,7 @@ from routes.consumption import router as consumption_router
 from routes.production import router as production_router
 from routes.equipment import router as equipment_router
 from routes.reporting import router as reporting_router
+from routes.currencies import router as currencies_router
 from routes.simulations import router as simulations_router
 from routes.maintenance import router as maintenance_router
@@ -41,4 +42,5 @@ app.include_router(production_router)
 app.include_router(equipment_router)
 app.include_router(maintenance_router)
 app.include_router(reporting_router)
+app.include_router(currencies_router)
 app.include_router(ui_router)
diff --git a/routes/currencies.py b/routes/currencies.py
index d1a86fc..d9a210f 100644
--- a/routes/currencies.py
+++ b/routes/currencies.py
@@ -1,7 +1,9 @@
-from typing import List, Dict, Any
+from typing import Dict, List, Optional
 
-from fastapi import APIRouter, Depends
+from fastapi import APIRouter, Depends, HTTPException, Query, status
+from pydantic import BaseModel, ConfigDict, Field, field_validator
 from sqlalchemy.orm import Session
+from sqlalchemy.exc import IntegrityError
 
 from models.currency import Currency
 from routes.dependencies import get_db
@@ -9,11 +11,181 @@ from routes.dependencies import get_db
 router = APIRouter(prefix="/api/currencies", tags=["Currencies"])
 
-@router.get("/", response_model=List[Dict[str, Any]])
-def list_currencies(db: Session = Depends(get_db)):
-    results = []
-    for c in db.query(Currency).filter_by(is_active=True).order_by(Currency.code).all():
-        results.append({"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol})
-    if not results:
-        results.append({"id": "USD", "name": "US Dollar (USD)", "symbol": "$"})
-    return results
+DEFAULT_CURRENCY_CODE = "USD"
+DEFAULT_CURRENCY_NAME = "US Dollar"
+DEFAULT_CURRENCY_SYMBOL = "$"
+
+
+class CurrencyBase(BaseModel):
+    name: str = Field(..., min_length=1, max_length=128)
+    symbol: Optional[str] = Field(default=None, max_length=8)
+
+    @staticmethod
+    def _normalize_symbol(value: Optional[str]) -> Optional[str]:
+        if value is None:
+            return None
+        value = value.strip()
+        return value or None
+
+    @field_validator("name")
+    @classmethod
+    def _strip_name(cls, value: str) -> str:
+        return value.strip()
+
+    @field_validator("symbol")
+    @classmethod
+    def _strip_symbol(cls, value: Optional[str]) -> Optional[str]:
+        return cls._normalize_symbol(value)
+
+
+class CurrencyCreate(CurrencyBase):
+    code: str = Field(..., min_length=3, max_length=3)
+    is_active: bool = True
+
+    @field_validator("code")
+    @classmethod
+    def _normalize_code(cls, value: str) -> str:
+        return value.strip().upper()
+
+
+class CurrencyUpdate(CurrencyBase):
+    is_active: Optional[bool] = None
+
+
+class CurrencyActivation(BaseModel):
+    is_active: bool
+
+
+class CurrencyRead(CurrencyBase):
+    id: int
+    code: str
+    is_active: bool
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+def _ensure_default_currency(db: Session) -> Currency:
+    existing = (
+        db.query(Currency)
+        .filter(Currency.code == DEFAULT_CURRENCY_CODE)
+        .one_or_none()
+    )
+    if existing:
+        return existing
+
+    default_currency = Currency(
+        code=DEFAULT_CURRENCY_CODE,
+        name=DEFAULT_CURRENCY_NAME,
+        symbol=DEFAULT_CURRENCY_SYMBOL,
+        is_active=True,
+    )
+    db.add(default_currency)
+    try:
+        db.commit()
+    except IntegrityError:
+        db.rollback()
+        existing = (
+            db.query(Currency)
+            .filter(Currency.code == DEFAULT_CURRENCY_CODE)
+            .one()
+        )
+        return existing
+    db.refresh(default_currency)
+    return default_currency
+
+
+def _get_currency_or_404(db: Session, code: str) -> Currency:
+    normalized = code.strip().upper()
+    currency = (
+        db.query(Currency)
+        .filter(Currency.code == normalized)
+        .one_or_none()
+    )
+    if currency is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail="Currency not found")
+    return currency
+
+
+@router.get("/", response_model=List[CurrencyRead])
+def list_currencies(
+    include_inactive: bool = Query(
+        False, description="Include inactive currencies"),
+    db: Session = Depends(get_db),
+):
+    _ensure_default_currency(db)
+    query = db.query(Currency)
+    if not include_inactive:
+        query = query.filter(Currency.is_active.is_(True))
+    currencies = query.order_by(Currency.code).all()
+    return currencies
+
+
+@router.post("/", response_model=CurrencyRead, status_code=status.HTTP_201_CREATED)
+def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
+    code = payload.code
+    existing = (
+        db.query(Currency)
+        .filter(Currency.code == code)
+        .one_or_none()
+    )
+    if existing is not None:
+        raise HTTPException(
+            status_code=status.HTTP_409_CONFLICT,
+            detail=f"Currency '{code}' already exists",
+        )
+
+    currency = Currency(
+        code=code,
+        name=payload.name,
+        symbol=CurrencyBase._normalize_symbol(payload.symbol),
+        is_active=payload.is_active,
+    )
+    db.add(currency)
+    db.commit()
+    db.refresh(currency)
+    return currency
+
+
+@router.put("/{code}", response_model=CurrencyRead)
+def update_currency(code: str, payload: CurrencyUpdate, db: Session = Depends(get_db)):
+    currency = _get_currency_or_404(db, code)
+
+    if payload.name is not None:
+        setattr(currency, "name", payload.name)
+    if payload.symbol is not None:
+        setattr(
+            currency,
+            "symbol",
+            CurrencyBase._normalize_symbol(payload.symbol),
+        )
+    if payload.is_active is not None:
+        code_value = getattr(currency, "code")
+        if code_value == DEFAULT_CURRENCY_CODE and payload.is_active is False:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail="The default currency cannot be deactivated.",
+            )
+        setattr(currency, "is_active", payload.is_active)
+
+    db.add(currency)
+    db.commit()
+    db.refresh(currency)
+    return currency
+
+
+@router.patch("/{code}/activation", response_model=CurrencyRead)
+def toggle_currency_activation(code: str, body: CurrencyActivation, db: Session = Depends(get_db)):
+    currency = _get_currency_or_404(db, code)
+    code_value = getattr(currency, "code")
+    if code_value == DEFAULT_CURRENCY_CODE and body.is_active is False:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail="The default currency cannot be deactivated.",
+        )
+
+    setattr(currency, "is_active", body.is_active)
+    db.add(currency)
+    db.commit()
+    db.refresh(currency)
+    return currency
diff --git a/routes/ui.py b/routes/ui.py
index ff54d02..a52ae9d 100644
--- a/routes/ui.py
+++ b/routes/ui.py
@@ -19,6 +19,7 @@ from models.simulation_result import SimulationResult
 from routes.dependencies import get_db
 from services.reporting import generate_report
 from models.currency import Currency
+from routes.currencies import DEFAULT_CURRENCY_CODE, _ensure_default_currency
 
 
 CURRENCY_CHOICES: list[Dict[str, Any]] = [
@@ -153,6 +154,38 @@ def _load_currencies(db: Session) -> Dict[str, Any]:
     return {"currency_options": items}
 
 
+def _load_currency_settings(db: Session) -> Dict[str, Any]:
+    _ensure_default_currency(db)
+    records = db.query(Currency).order_by(Currency.code).all()
+    currencies: list[Dict[str, Any]] = []
+    for record in records:
+        code_value = getattr(record, "code")
+        currencies.append(
+            {
+                "id": int(getattr(record, "id")),
+                "code": code_value,
+                "name": getattr(record, "name"),
+                "symbol": getattr(record, "symbol"),
+                "is_active": bool(getattr(record, "is_active", True)),
+                "is_default": code_value == DEFAULT_CURRENCY_CODE,
+            }
+        )
+
+    active_count = sum(1 for item in currencies if item["is_active"])
+    inactive_count = len(currencies) - active_count
+
+    return {
+        "currencies": currencies,
+        "currency_stats": {
+            "total": len(currencies),
+            "active": active_count,
+            "inactive": inactive_count,
+        },
+        "default_currency_code": DEFAULT_CURRENCY_CODE,
+        "currency_api_base": "/api/currencies",
+    }
+
+
 def _load_consumption(db: Session) -> Dict[str, Any]:
     grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
     for record in (
@@ -637,3 +670,10 @@ async def simulations_view(request: Request, db: Session = Depends(get_db)):
 async def reporting_view(request: Request, db: Session = Depends(get_db)):
     """Render the reporting view with scenario KPI summaries."""
     return _render(request, "reporting.html", _load_reporting(db))
+
+
+@router.get("/ui/currencies", response_class=HTMLResponse)
+async def currencies_view(request: Request, db: Session = Depends(get_db)):
+    """Render the currency administration page with full currency context."""
+    context = _load_currency_settings(db)
+    return _render(request, "currencies.html", context)
diff --git a/scripts/migrations/000_base.sql b/scripts/migrations/000_base.sql
new file mode 100644
index 0000000..7462d68
--- /dev/null
+++ b/scripts/migrations/000_base.sql
@@ -0,0 +1,142 @@
+-- Baseline migration for CalMiner database schema
+-- Date: 2025-10-25
+-- Purpose: Consolidate foundational tables and reference data
+
+BEGIN;
+
+-- Currency reference table
+CREATE TABLE IF NOT EXISTS currency (
+    id SERIAL PRIMARY KEY,
+    code VARCHAR(3) NOT NULL UNIQUE,
+    name VARCHAR(128) NOT NULL,
+    symbol VARCHAR(8),
+    is_active BOOLEAN NOT NULL DEFAULT TRUE
+);
+
+INSERT INTO currency (code, name, symbol, is_active)
+VALUES
+    ('USD', 'United States Dollar', 'USD$', TRUE),
+    ('EUR', 'Euro', 'EUR', TRUE),
+    ('CLP', 'Chilean Peso', 'CLP$', TRUE),
+    ('RMB', 'Chinese Yuan', 'RMB', TRUE),
+    ('GBP', 'British Pound', 'GBP', TRUE),
+    ('CAD', 'Canadian Dollar', 'CAD$', TRUE),
+    ('AUD', 'Australian Dollar', 'AUD$', TRUE)
+ON CONFLICT (code) DO UPDATE
+SET name = EXCLUDED.name,
+    symbol = EXCLUDED.symbol,
+    is_active = EXCLUDED.is_active;
+
+-- Measurement unit reference table
+CREATE TABLE IF NOT EXISTS measurement_unit (
+    id SERIAL PRIMARY KEY,
+    code VARCHAR(64) NOT NULL UNIQUE,
+    name VARCHAR(128) NOT NULL,
+    symbol VARCHAR(16),
+    unit_type VARCHAR(32) NOT NULL,
+    is_active BOOLEAN NOT NULL DEFAULT TRUE,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+INSERT INTO measurement_unit (code, name, symbol, unit_type, is_active)
+VALUES
+    ('tonnes', 'Tonnes', 't', 'mass', TRUE),
+    ('kilograms', 'Kilograms', 'kg', 'mass', TRUE),
+    ('pounds', 'Pounds', 'lb', 'mass', TRUE),
+    ('liters', 'Liters', 'L', 'volume', TRUE),
+    ('cubic_meters', 'Cubic Meters', 'm3', 'volume', TRUE),
+    ('kilowatt_hours', 'Kilowatt Hours', 'kWh', 'energy', TRUE)
+ON CONFLICT (code) DO UPDATE
+SET name = EXCLUDED.name,
+    symbol = EXCLUDED.symbol,
+    unit_type = EXCLUDED.unit_type,
+    is_active = EXCLUDED.is_active;
+
+-- Consumption and production measurement metadata
+ALTER TABLE consumption
+    ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
+ALTER TABLE consumption
+    ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
+
+ALTER TABLE production_output
+    ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
+ALTER TABLE production_output
+    ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
+
+-- Currency integration for CAPEX and OPEX
+ALTER TABLE capex
+    ADD COLUMN IF NOT EXISTS currency_id INTEGER;
+ALTER TABLE opex
+    ADD COLUMN IF NOT EXISTS currency_id INTEGER;
+
+DO $$
+DECLARE
+    usd_id INTEGER;
+BEGIN
+    -- Ensure currency_id columns align with legacy currency_code values when present
+    IF EXISTS (
+        SELECT 1 FROM information_schema.columns
+        WHERE table_name = 'capex' AND column_name = 'currency_code'
+    ) THEN
+        UPDATE capex AS c
+        SET currency_id = cur.id
+        FROM currency AS cur
+        WHERE c.currency_code = cur.code
+          AND (c.currency_id IS DISTINCT FROM cur.id);
+    END IF;
+
+    IF EXISTS (
+        SELECT 1 FROM information_schema.columns
+        WHERE table_name = 'opex' AND column_name = 'currency_code'
+    ) THEN
+        UPDATE opex AS o
+        SET currency_id = cur.id
+        FROM currency AS cur
+        WHERE o.currency_code = cur.code
+          AND (o.currency_id IS DISTINCT FROM cur.id);
+    END IF;
+
+    SELECT id INTO usd_id FROM currency WHERE code = 'USD';
+    IF usd_id IS NOT NULL THEN
+        UPDATE capex SET currency_id = usd_id WHERE currency_id IS NULL;
+        UPDATE opex SET currency_id = usd_id WHERE currency_id IS NULL;
+    END IF;
+END $$;
+
+ALTER TABLE capex
+    ALTER COLUMN currency_id SET NOT NULL;
+ALTER TABLE opex
+    ALTER COLUMN currency_id SET NOT NULL;
+
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1 FROM information_schema.table_constraints
+        WHERE table_schema = current_schema()
+          AND table_name = 'capex'
+          AND constraint_name = 'fk_capex_currency'
+    ) THEN
+        ALTER TABLE capex
+            ADD CONSTRAINT fk_capex_currency FOREIGN KEY (currency_id)
+            REFERENCES currency (id) ON DELETE RESTRICT;
+    END IF;
+
+    IF NOT EXISTS (
+        SELECT 1 FROM information_schema.table_constraints
+        WHERE table_schema = current_schema()
+          AND table_name = 'opex'
+          AND constraint_name = 'fk_opex_currency'
+    ) THEN
+        ALTER TABLE opex
+            ADD CONSTRAINT fk_opex_currency FOREIGN KEY (currency_id)
+            REFERENCES currency (id) ON DELETE RESTRICT;
+    END IF;
+END $$;
+
+ALTER TABLE capex
+    DROP COLUMN IF EXISTS currency_code;
+ALTER TABLE opex
+    DROP COLUMN IF EXISTS currency_code;
+
+COMMIT;
diff --git a/scripts/migrations/20251021_add_currency_and_unit_fields.sql b/scripts/migrations/20251021_add_currency_and_unit_fields.sql
index 9c17d8d..6eebf25 100644
--- a/scripts/migrations/20251021_add_currency_and_unit_fields.sql
+++ b/scripts/migrations/20251021_add_currency_and_unit_fields.sql
@@ -7,23 +7,23 @@ BEGIN;
 
 -- CAPEX / OPEX
 ALTER TABLE capex
-    ADD COLUMN currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
+    ADD COLUMN IF NOT EXISTS currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
 
 ALTER TABLE opex
-    ADD COLUMN currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
+    ADD COLUMN IF NOT EXISTS currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
 
 -- Consumption tracking
 ALTER TABLE consumption
-    ADD COLUMN unit_name VARCHAR(64);
+    ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
 
 ALTER TABLE consumption
-    ADD COLUMN unit_symbol VARCHAR(16);
+    ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
 
 -- Production output
 ALTER TABLE production_output
-    ADD COLUMN unit_name VARCHAR(64);
+    ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
 
 ALTER TABLE production_output
-    ADD COLUMN unit_symbol VARCHAR(16);
+    ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
 
 COMMIT;
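The baseline-aware behaviour that `run_migrations` needs (apply `000_base.sql` once, mark the legacy scripts as applied, skip anything already recorded in `schema_migrations`) reduces to a lookup against the tracking table before each file is executed. The following is a minimal sketch under stated assumptions, not the script's actual implementation: it assumes a `schema_migrations(filename)` table and plain `.sql` files sorted lexicographically, and the function name is illustrative.

```python
from pathlib import Path

import psycopg2

BASELINE = "000_base.sql"
LEGACY = (
    "20251021_add_currency_and_unit_fields.sql",
    "20251022_create_currency_table_and_fks.sql",
)


def run_migrations(dsn: str, migrations_dir: Path) -> int:
    applied = 0
    with psycopg2.connect(dsn) as conn:
        conn.autocommit = True  # let each migration file manage its own BEGIN/COMMIT
        with conn.cursor() as cur:
            cur.execute(
                "CREATE TABLE IF NOT EXISTS schema_migrations (filename TEXT PRIMARY KEY)"
            )
            cur.execute("SELECT filename FROM schema_migrations")
            done = {row[0] for row in cur.fetchall()}
            for path in sorted(migrations_dir.glob("*.sql")):
                if path.name in done:
                    continue  # idempotent: re-runs skip recorded files
                cur.execute(path.read_text(encoding="utf-8"))
                cur.execute(
                    "INSERT INTO schema_migrations (filename) VALUES (%s)",
                    (path.name,),
                )
                applied += 1
                if path.name == BASELINE:
                    # The baseline subsumes the legacy scripts: record them
                    # as applied without executing them again.
                    for legacy in LEGACY:
                        cur.execute(
                            "INSERT INTO schema_migrations (filename) VALUES (%s)"
                            " ON CONFLICT DO NOTHING",
                            (legacy,),
                        )
                        done.add(legacy)
    return applied
```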
diff --git a/scripts/seed_data.py b/scripts/seed_data.py
new file mode 100644
index 0000000..5c96278
--- /dev/null
+++ b/scripts/seed_data.py
@@ -0,0 +1,162 @@
+"""Seed baseline data for CalMiner in an idempotent manner.
+
+Usage examples
+--------------
+
+```powershell
+# Use existing environment variables (or load from setup_test.env.example)
+python scripts/seed_data.py --currencies --units --defaults
+
+# Dry-run to preview actions
+python scripts/seed_data.py --currencies --dry-run
+```
+"""
+
+from __future__ import annotations
+
+import argparse
+import logging
+import os
+from typing import Iterable, Optional
+
+import psycopg2
+from psycopg2 import errors
+from psycopg2.extras import execute_values
+
+from scripts.setup_database import DatabaseConfig
+
+
+logger = logging.getLogger(__name__)
+
+CURRENCY_SEEDS = (
+    ("USD", "United States Dollar", "USD$", True),
+    ("EUR", "Euro", "EUR", True),
+    ("CLP", "Chilean Peso", "CLP$", True),
+    ("RMB", "Chinese Yuan", "RMB", True),
+    ("GBP", "British Pound", "GBP", True),
+    ("CAD", "Canadian Dollar", "CAD$", True),
+    ("AUD", "Australian Dollar", "AUD$", True),
+)
+
+MEASUREMENT_UNIT_SEEDS = (
+    ("tonnes", "Tonnes", "t", "mass", True),
+    ("kilograms", "Kilograms", "kg", "mass", True),
+    ("pounds", "Pounds", "lb", "mass", True),
+    ("liters", "Liters", "L", "volume", True),
+    ("cubic_meters", "Cubic Meters", "m3", "volume", True),
+    ("kilowatt_hours", "Kilowatt Hours", "kWh", "energy", True),
+)
+
+
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="Seed baseline CalMiner data")
+    parser.add_argument("--currencies", action="store_true", help="Seed currency table")
+    parser.add_argument("--units", action="store_true", help="Seed unit table")
+    parser.add_argument("--defaults", action="store_true", help="Seed default records")
+    parser.add_argument("--dry-run", action="store_true", help="Print actions without executing")
+    parser.add_argument(
+        "--verbose", "-v", action="count", default=0, help="Increase logging verbosity"
+    )
+    return parser.parse_args()
+
+
+def _configure_logging(args: argparse.Namespace) -> None:
+    # WARNING by default, INFO with -v, DEBUG with -vv
+    level = logging.WARNING - (10 * min(args.verbose, 2))
+    logging.basicConfig(level=level, format="%(levelname)s %(message)s")
+
+
+def main() -> None:
+    args = parse_args()
+    run_with_namespace(args)
+
+
+def run_with_namespace(
+    args: argparse.Namespace,
+    *,
+    config: Optional[DatabaseConfig] = None,
+) -> None:
+    _configure_logging(args)
+
+    if not any((args.currencies, args.units, args.defaults)):
+        logger.info("No seeding options provided; exiting")
+        return
+
+    config = config or DatabaseConfig.from_env()
+    with psycopg2.connect(config.application_dsn()) as conn:
+        conn.autocommit = True
+        with conn.cursor() as cursor:
+            if args.currencies:
+                _seed_currencies(cursor, dry_run=args.dry_run)
+            if args.units:
+                _seed_units(cursor, dry_run=args.dry_run)
+            if args.defaults:
+                _seed_defaults(cursor, dry_run=args.dry_run)
+
+
+def _seed_currencies(cursor, *, dry_run: bool) -> None:
+    logger.info("Seeding currency table (%d rows)", len(CURRENCY_SEEDS))
+    if dry_run:
+        for code, name, symbol, active in CURRENCY_SEEDS:
+            logger.info("Dry run: would upsert currency %s (%s)", code, name)
+        return
+
+    execute_values(
+        cursor,
+        """
+        INSERT INTO currency (code, name, symbol, is_active)
+        VALUES %s
+        ON CONFLICT (code) DO UPDATE
+        SET name = EXCLUDED.name,
+            symbol = EXCLUDED.symbol,
+            is_active = EXCLUDED.is_active
+        """,
+        CURRENCY_SEEDS,
+    )
+    logger.info("Currency seed complete")
+
+
+def _seed_units(cursor, *, dry_run: bool) -> None:
+    total = len(MEASUREMENT_UNIT_SEEDS)
+    logger.info("Seeding measurement_unit table (%d rows)", total)
+    if dry_run:
+        for code, name, symbol, unit_type, _ in MEASUREMENT_UNIT_SEEDS:
+            logger.info(
+                "Dry run: would upsert measurement unit %s (%s - %s)",
+                code,
+                name,
+                unit_type,
+            )
+        return
+
+    try:
+        execute_values(
+            cursor,
+            """
+            INSERT INTO measurement_unit (code, name, symbol, unit_type, is_active)
+            VALUES %s
+            ON CONFLICT (code) DO UPDATE
+            SET name = EXCLUDED.name,
+                symbol = EXCLUDED.symbol,
+                unit_type = EXCLUDED.unit_type,
+                is_active = EXCLUDED.is_active
+            """,
+            MEASUREMENT_UNIT_SEEDS,
+        )
+    except errors.UndefinedTable:
+        logger.warning(
+            "measurement_unit table does not exist; skipping unit seeding."
+        )
+        cursor.connection.rollback()
+        return
+
+    logger.info("Measurement unit seed complete")
+
+
+def _seed_defaults(cursor, *, dry_run: bool) -> None:
+    logger.info("Seeding default records - not yet implemented")
+    if dry_run:
+        return
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/scripts/setup_database.py b/scripts/setup_database.py
new file mode 100644
index 0000000..cf00b8d
--- /dev/null
+++ b/scripts/setup_database.py
@@ -0,0 +1,1146 @@
+"""Utilities to bootstrap the CalMiner PostgreSQL database.
+
+This script is designed to be idempotent. Each step checks the existing
+state before attempting to modify it so repeated executions are safe.
+
+Environment variables (with defaults) used when establishing connections:
+
+* ``DATABASE_DRIVER`` (``postgresql``)
+* ``DATABASE_HOST`` (required)
+* ``DATABASE_PORT`` (``5432``)
+* ``DATABASE_NAME`` (required)
+* ``DATABASE_USER`` (required)
+* ``DATABASE_PASSWORD`` (optional, required for password auth)
+* ``DATABASE_SCHEMA`` (``public``)
+* ``DATABASE_ADMIN_URL`` (overrides individual admin settings)
+* ``DATABASE_SUPERUSER`` (falls back to ``DATABASE_USER`` or ``postgres``)
+* ``DATABASE_SUPERUSER_PASSWORD`` (falls back to ``DATABASE_PASSWORD``)
+* ``DATABASE_SUPERUSER_DB`` (``postgres``)
+
+Set ``DATABASE_URL`` if other parts of the application rely on a single
+connection string; this script will still honor the granular inputs above.
+"""
+
+from __future__ import annotations
+
+import argparse
+import importlib
+import logging
+import os
+import pkgutil
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional, cast
+from urllib.parse import quote_plus, urlencode
+
+import psycopg2
+from psycopg2 import errors
+from psycopg2 import sql
+from psycopg2 import extensions
+from psycopg2.extensions import connection as PGConnection, parse_dsn
+from dotenv import load_dotenv
+from sqlalchemy import create_engine, inspect
+
+ROOT_DIR = Path(__file__).resolve().parents[1]
+if str(ROOT_DIR) not in sys.path:
+    sys.path.insert(0, str(ROOT_DIR))
+
+from config.database import Base
+
+
+logger = logging.getLogger(__name__)
+
+SCRIPTS_DIR = Path(__file__).resolve().parent
+DEFAULT_MIGRATIONS_DIR = SCRIPTS_DIR / "migrations"
+MIGRATIONS_TABLE = "schema_migrations"
+
+
+@dataclass(slots=True)
+class DatabaseConfig:
+    """Configuration required to manage the application database."""
+
+    driver: str
+    host: str
+    port: int
+    database: str
+    user: str
+    password: Optional[str]
+    schema: Optional[str]
+
+    admin_user: str
+    admin_password: Optional[str]
+    admin_database: str = "postgres"
+
+    @classmethod
+    def from_env(
+        cls,
+        overrides: Optional[dict[str, Optional[str]]] = None,
+    ) -> "DatabaseConfig":
+        load_dotenv()
+
+        override_map: dict[str, Optional[str]] = dict(overrides or {})
+
+        def _get(name: str, default: Optional[str] = None) -> Optional[str]:
+            if name in override_map and override_map[name] is not None:
+                return override_map[name]
+            env_value = os.getenv(name)
+            if env_value is not None:
+                return env_value
+            return default
+
+        driver = _get("DATABASE_DRIVER", "postgresql")
+        host = _get("DATABASE_HOST")
+        port_value = _get("DATABASE_PORT", "5432")
+        database = _get("DATABASE_NAME")
+        user = _get("DATABASE_USER")
+        password = _get("DATABASE_PASSWORD")
+        schema = _get("DATABASE_SCHEMA", "public")
+
+        try:
+            port = int(port_value) if port_value is not None else 5432
+        except ValueError as exc:
+            raise RuntimeError(
+                "Invalid DATABASE_PORT value: expected integer, got"
+                f" '{port_value}'"
+            ) from exc
+
+        admin_url = _get("DATABASE_ADMIN_URL")
+        if admin_url:
+            admin_conninfo = parse_dsn(admin_url)
+            admin_user = admin_conninfo.get("user") or user or "postgres"
+            admin_password = admin_conninfo.get("password")
+            admin_database = admin_conninfo.get("dbname") or "postgres"
+            host = admin_conninfo.get("host") or host
+            port = int(admin_conninfo.get("port") or port)
+        else:
+            admin_user = _get("DATABASE_SUPERUSER", user or "postgres")
+            admin_password = _get("DATABASE_SUPERUSER_PASSWORD", password)
+            admin_database = _get("DATABASE_SUPERUSER_DB", "postgres")
+
+        missing = [
+            name
+            for name, value in (
+                ("DATABASE_HOST", host),
+                ("DATABASE_NAME", database),
+                ("DATABASE_USER", user),
+            )
+            if not value
+        ]
+        if missing:
+            raise RuntimeError(
+                "Missing required database configuration: "
+                + ", ".join(missing)
+            )
+
+        host = cast(str, host)
+        database = cast(str, database)
+        user = cast(str, user)
+        driver = cast(str, driver)
+        admin_user = cast(str, admin_user)
+        admin_database = cast(str, admin_database)
+
+        return cls(
+            driver=driver,
+            host=host,
+            port=port,
+            database=database,
+            user=user,
+            password=password,
+            schema=schema,
+            admin_user=admin_user,
+            admin_password=admin_password,
+            admin_database=admin_database,
+        )
+
+    def admin_dsn(self, database: Optional[str] = None) -> str:
+        target_db = database or self.admin_database
+        return self._compose_url(
+            user=self.admin_user,
+            password=self.admin_password,
+            database=target_db,
+            schema=None,
+        )
+
+    def application_dsn(self) -> str:
+        """Return a SQLAlchemy URL for connecting as the application role."""
+
+        return self._compose_url(
+            user=self.user,
+            password=self.password,
+            database=self.database,
+            schema=self.schema,
+        )
+
+    def _compose_url(
+        self,
+        *,
+        user: Optional[str],
+        password: Optional[str],
+        database: str,
+        schema: Optional[str],
+    ) -> str:
+        auth = ""
+        if user:
+            encoded_user = quote_plus(user)
+            if password:
+                encoded_pass = quote_plus(password)
+                auth = f"{encoded_user}:{encoded_pass}@"
+            else:
+                auth = f"{encoded_user}@"
+
+        host = self.host
+        if ":" in host and not host.startswith("["):
+            host = f"[{host}]"
+
+        host_port = host
+        if self.port:
+            host_port = f"{host}:{self.port}"
+
+        url = f"{self.driver}://{auth}{host_port}/{database}"
+
+        params = {}
+        if schema and schema.strip() and schema != "public":
+            params["options"] = f"-csearch_path={schema}"
+
+        if params:
+            url = f"{url}?{urlencode(params, quote_via=quote_plus)}"
+
+        return url
+
+
+class DatabaseSetup:
+    """Encapsulates the full setup workflow."""
+
+    def __init__(self, config: DatabaseConfig, *, dry_run: bool = False) -> None:
+        self.config = config
+        self.dry_run = dry_run
+        self._models_loaded = False
+        self._rollback_actions: list[tuple[str, Callable[[], None]]] = []
+
+    def _register_rollback(self, label: str, action: Callable[[], None]) -> None:
+        if self.dry_run:
+            return
+        self._rollback_actions.append((label, action))
+
+    def execute_rollbacks(self) -> None:
+        if not self._rollback_actions:
+            logger.info("No rollback actions registered; nothing to undo.")
+            return
+
+        logger.warning(
+            "Attempting rollback of %d action(s)", len(self._rollback_actions)
+        )
+        for label, action in reversed(self._rollback_actions):
+            try:
+                logger.warning("Rollback step: %s", label)
+                action()
+            except Exception:
+                logger.exception("Rollback action '%s' failed", label)
+        self._rollback_actions.clear()
+
+    def clear_rollbacks(self) -> None:
+        self._rollback_actions.clear()
+
+    def _describe_connection(self, user: str, database: str) -> str:
+        return f"{user}@{self.config.host}:{self.config.port}/{database}"
+
+    def validate_admin_connection(self) -> None:
+        descriptor = self._describe_connection(
+            self.config.admin_user, self.config.admin_database
+        )
+        logger.info("Validating admin connection (%s)", descriptor)
+        try:
+            with self._admin_connection(self.config.admin_database) as conn:
+                with conn.cursor() as cursor:
+                    cursor.execute("SELECT 1")
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                "Unable to connect with admin credentials. "
+                "Check DATABASE_ADMIN_URL or DATABASE_SUPERUSER settings."
+                f" Target: {descriptor}"
+            ) from exc
+        logger.info("Admin connection verified (%s)", descriptor)
+
+    def validate_application_connection(self) -> None:
+        descriptor = self._describe_connection(
+            self.config.user, self.config.database
+        )
+        logger.info("Validating application connection (%s)", descriptor)
+        try:
+            with self._application_connection() as conn:
+                with conn.cursor() as cursor:
+                    cursor.execute("SELECT 1")
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                "Unable to connect using application credentials. "
+                "Ensure the role exists and credentials are correct. "
+                f"Target: {descriptor}"
+            ) from exc
+        logger.info("Application connection verified (%s)", descriptor)
+
+    def ensure_database(self) -> None:
+        """Create the target database when it does not already exist."""
+
+        logger.info("Ensuring database '%s' exists", self.config.database)
+        try:
+            conn = self._admin_connection(self.config.admin_database)
+        except RuntimeError:
+            logger.error(
+                "Could not connect to admin database '%s' while creating '%s'.",
+                self.config.admin_database,
+                self.config.database,
+            )
+            raise
+        try:
+            conn.autocommit = True
+            conn.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
+            cursor = conn.cursor()
+            try:
+                try:
+                    cursor.execute(
+                        "SELECT 1 FROM pg_database WHERE datname = %s",
+                        (self.config.database,),
+                    )
+                except psycopg2.Error as exc:
+                    message = (
+                        "Unable to inspect existing databases while ensuring '%s'."
+                        " Verify admin permissions."
+                    ) % self.config.database
+                    logger.error(message)
+                    raise RuntimeError(message) from exc
+
+                exists = cursor.fetchone() is not None
+                if exists:
+                    logger.info(
+                        "Database '%s' already present", self.config.database
+                    )
+                    return
+
+                if self.dry_run:
+                    logger.info(
+                        "Dry run: would create database '%s'. Run without --dry-run to proceed.",
+                        self.config.database,
+                    )
+                    return
+
+                try:
+                    cursor.execute(
+                        sql.SQL("CREATE DATABASE {} ENCODING 'UTF8'").format(
+                            sql.Identifier(self.config.database)
+                        )
+                    )
+                except psycopg2.Error as exc:
+                    message = (
+                        "Failed to create database '%s'. Rerun with --dry-run for diagnostics"
+                    ) % self.config.database
+                    logger.error(message)
+                    raise RuntimeError(message) from exc
+                else:
+                    rollback_label = f"drop database {self.config.database}"
+                    self._register_rollback(
+                        rollback_label,
+                        lambda db=self.config.database: self._drop_database(db),
+                    )
+                logger.info("Created database '%s'", self.config.database)
+            finally:
+                cursor.close()
+        finally:
+            conn.close()
+
+    def ensure_role(self) -> None:
+        """Create the application role and assign privileges when missing."""
+
+        logger.info("Ensuring role '%s' exists", self.config.user)
+        try:
+            admin_conn = self._admin_connection(self.config.admin_database)
+        except RuntimeError:
+            logger.error(
+                "Unable to connect with admin credentials while ensuring role '%s'",
+                self.config.user,
+            )
+            raise
+
+        with admin_conn as conn:
+            conn.autocommit = True
+            with conn.cursor() as cursor:
+                try:
+                    cursor.execute(
+                        "SELECT 1 FROM pg_roles WHERE rolname = %s",
+                        (self.config.user,),
+                    )
+                except psycopg2.Error as exc:
+                    message = (
+                        "Unable to inspect existing roles while ensuring role '%s'."
+                        " Verify admin permissions."
+                    ) % self.config.user
+                    logger.error(message)
+                    raise RuntimeError(message) from exc
+                role_exists = cursor.fetchone() is not None
+                if not role_exists:
+                    logger.info("Creating role '%s'", self.config.user)
+                    if self.dry_run:
+                        logger.info(
+                            "Dry run: would create role '%s'. Run without --dry-run to apply.",
+                            self.config.user,
+                        )
+                        return
+                    try:
+                        if self.config.password:
+                            cursor.execute(
+                                sql.SQL("CREATE ROLE {} WITH LOGIN PASSWORD %s").format(
+                                    sql.Identifier(self.config.user)
+                                ),
+                                (self.config.password,),
+                            )
+                        else:
+                            cursor.execute(
+                                sql.SQL("CREATE ROLE {} WITH LOGIN").format(
+                                    sql.Identifier(self.config.user)
+                                )
+                            )
+                    except psycopg2.Error as exc:
+                        message = (
+                            "Failed to create role '%s'. Review admin privileges and rerun."
+                        ) % self.config.user
+                        logger.error(message)
+                        raise RuntimeError(message) from exc
+                    else:
+                        rollback_label = f"drop role {self.config.user}"
+                        self._register_rollback(
+                            rollback_label,
+                            lambda role=self.config.user: self._drop_role(role),
+                        )
+                else:
+                    logger.info("Role '%s' already present", self.config.user)
+
+        try:
+            role_conn = self._admin_connection(self.config.database)
+        except RuntimeError:
+            logger.error(
+                "Unable to connect to application database '%s' while granting privileges to role '%s'",
+                self.config.database,
+                self.config.user,
+            )
+            raise
+
+        if self.dry_run:
+            logger.info(
+                "Dry run: would grant privileges on schema/database to role '%s'.",
+                self.config.user,
+            )
+            return
+
+        with role_conn as conn:
+            conn.autocommit = True
+            with conn.cursor() as cursor:
+                schema_name = self.config.schema or "public"
+                schema_identifier = sql.Identifier(schema_name)
+                role_identifier = sql.Identifier(self.config.user)
+
+                try:
+                    cursor.execute(
+                        sql.SQL("GRANT CONNECT ON DATABASE {} TO {}").format(
+                            sql.Identifier(self.config.database),
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL("GRANT USAGE ON SCHEMA {} TO {}").format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL("GRANT CREATE ON SCHEMA {} TO {}").format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL(
+                            "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} TO {}"
+                        ).format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL(
+                            "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} TO {}"
+                        ).format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL(
+                            "ALTER DEFAULT PRIVILEGES IN SCHEMA {} GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO {}"
+                        ).format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                    cursor.execute(
+                        sql.SQL(
+                            "ALTER DEFAULT PRIVILEGES IN SCHEMA {} GRANT USAGE, SELECT ON SEQUENCES TO {}"
+                        ).format(
+                            schema_identifier,
+                            role_identifier,
+                        )
+                    )
+                except psycopg2.Error as exc:
+                    message = (
+                        "Failed to grant privileges to role '%s' in schema '%s'."
+                        " Rerun with --dry-run for more context."
+                    ) % (self.config.user, schema_name)
+                    logger.error(message)
+                    raise RuntimeError(message) from exc
+                logger.info(
+                    "Granted privileges on schema '%s' to role '%s'",
+                    schema_name,
+                    self.config.user,
+                )
+                rollback_label = f"revoke privileges for {self.config.user}"
+                self._register_rollback(
+                    rollback_label,
+                    lambda schema=schema_name: self._revoke_role_privileges(
+                        schema_name=schema
+                    ),
+                )
+
+    def ensure_schema(self) -> None:
+        """Create the configured schema when it does not exist."""
+
+        schema_name = self.config.schema
+        if not schema_name or schema_name == "public":
+            logger.info("Using default schema 'public'; nothing to ensure")
+            return
+
+        logger.info("Ensuring schema '%s' exists", schema_name)
+        with self._admin_connection(self.config.database) as conn:
+            conn.autocommit = True
+            with conn.cursor() as cursor:
+                cursor.execute(
+                    sql.SQL(
+                        "SELECT 1 FROM information_schema.schemata WHERE schema_name = %s"
+                    ),
+                    (schema_name,),
+                )
+                exists = cursor.fetchone() is not None
+                if not exists:
+                    if self.dry_run:
+                        logger.info(
+                            "Dry run: would create schema '%s'",
+                            schema_name,
+                        )
+                    else:
+                        cursor.execute(
+                            sql.SQL("CREATE SCHEMA {}").format(
+                                sql.Identifier(schema_name)
+                            )
+                        )
+                        logger.info("Created schema '%s'", schema_name)
+                try:
+                    if self.dry_run:
+                        logger.info(
+                            "Dry run: would set schema '%s' owner to '%s'",
+                            schema_name,
+                            self.config.user,
+                        )
+                    else:
+                        cursor.execute(
+                            sql.SQL("ALTER SCHEMA {} OWNER TO {}").format(
+                                sql.Identifier(schema_name),
+                                sql.Identifier(self.config.user),
+                            )
+                        )
+                except errors.UndefinedObject:
+                    logger.warning(
+                        "Role '%s' not found when assigning ownership to schema '%s'."
+                        " Run --ensure-role after creating the schema.",
+                        self.config.user,
+                        schema_name,
+                    )
+
+    def _admin_connection(self, database: Optional[str] = None) -> PGConnection:
+        target_db = database or self.config.admin_database
+        dsn = self.config.admin_dsn(database)
+        descriptor = self._describe_connection(
+            self.config.admin_user, target_db
+        )
+        try:
+            return psycopg2.connect(dsn)
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                "Unable to establish admin connection. "
+                f"Target: {descriptor}"
+            ) from exc
+
+    def _application_connection(self) -> PGConnection:
+        dsn = self.config.application_dsn()
+        descriptor = self._describe_connection(
+            self.config.user, self.config.database
+        )
+        try:
+            return psycopg2.connect(dsn)
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                "Unable to establish application connection. 
" + f"Target: {descriptor}" + ) from exc + + def initialize_schema(self) -> None: + """Create database objects from SQLAlchemy metadata if missing.""" + + self._ensure_models_loaded() + logger.info("Ensuring SQLAlchemy metadata is reflected in database") + engine = create_engine(self.config.application_dsn(), future=True) + try: + inspector = inspect(engine) + existing_tables = set( + inspector.get_table_names(schema=self.config.schema) + ) + metadata_tables = set(Base.metadata.tables.keys()) + missing_tables = sorted(metadata_tables - existing_tables) + + if missing_tables: + logger.info("Pending tables: %s", ", ".join(missing_tables)) + else: + logger.info("All tables already exist") + + if self.dry_run: + if missing_tables: + logger.info("Dry run: skipping creation of pending tables") + return + + Base.metadata.create_all(bind=engine, checkfirst=True) + finally: + engine.dispose() + + logger.info("Schema initialization complete") + + def _ensure_models_loaded(self) -> None: + if self._models_loaded: + return + + package = importlib.import_module("models") + for module_info in pkgutil.iter_modules(package.__path__): + importlib.import_module(f"{package.__name__}.{module_info.name}") + self._models_loaded = True + + def run_migrations(self, migrations_dir: Optional[Path | str] = None) -> None: + """Execute pending SQL migrations in chronological order.""" + + directory = ( + Path(migrations_dir) + if migrations_dir is not None + else DEFAULT_MIGRATIONS_DIR + ) + directory = directory.resolve() + + if not directory.exists(): + logger.warning("Migrations directory '%s' not found", directory) + return + + migration_files = sorted(directory.glob("*.sql")) + if not migration_files: + logger.info("No migration scripts found in '%s'", directory) + return + + baseline_name = "000_base.sql" + baseline_path = directory / baseline_name + + schema_name = self.config.schema or "public" + + with self._application_connection() as conn: + conn.autocommit = True + with conn.cursor() as cursor: + table_exists = self._migrations_table_exists( + cursor, schema_name) + if not table_exists: + if self.dry_run: + logger.info( + "Dry run: would create migration history table %s.%s", + schema_name, + MIGRATIONS_TABLE, + ) + applied: set[str] = set() + else: + self._create_migrations_table(cursor, schema_name) + logger.info( + "Created migration history table %s.%s", + schema_name, + MIGRATIONS_TABLE, + ) + applied = set() + else: + applied = self._fetch_applied_migrations( + cursor, schema_name) + + if ( + baseline_path.exists() + and baseline_name not in applied + ): + if self.dry_run: + logger.info( + "Dry run: baseline migration '%s' pending; would apply and mark legacy files", + baseline_name, + ) + else: + logger.info( + "Baseline migration '%s' pending; applying and marking older migrations", + baseline_name, + ) + try: + baseline_applied = self._apply_migration_file( + cursor, schema_name, baseline_path + ) + except Exception: + logger.error( + "Failed while applying baseline migration '%s'." 
+ " Review the migration contents and rerun with --dry-run for diagnostics.", + baseline_name, + exc_info=True, + ) + raise + applied.add(baseline_applied) + legacy_files = [ + path + for path in migration_files + if path.name != baseline_name + ] + for legacy in legacy_files: + if legacy.name not in applied: + try: + cursor.execute( + sql.SQL( + "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())" + ).format( + sql.Identifier( + schema_name, + MIGRATIONS_TABLE, + ) + ), + (legacy.name,), + ) + except Exception: + logger.error( + "Unable to record legacy migration '%s' after baseline application." + " Check schema_migrations table in schema '%s' for partial state.", + legacy.name, + schema_name, + exc_info=True, + ) + raise + applied.add(legacy.name) + logger.info( + "Marked legacy migration '%s' as applied via baseline", + legacy.name, + ) + + pending = [ + path + for path in migration_files + if path.name not in applied + ] + + if not pending: + logger.info("No pending migrations") + return + + logger.info( + "Pending migrations: %s", + ", ".join(path.name for path in pending), + ) + + if self.dry_run: + logger.info("Dry run: skipping migration execution") + return + + for path in pending: + self._apply_migration_file(cursor, schema_name, path) + + logger.info("Applied %d migrations", len(pending)) + + def _apply_migration_file( + self, + cursor, + schema_name: str, + path: Path, + ) -> str: + logger.info("Applying migration '%s'", path.name) + sql_text = path.read_text(encoding="utf-8") + try: + cursor.execute(sql_text) + cursor.execute( + sql.SQL( + "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())" + ).format( + sql.Identifier(schema_name, MIGRATIONS_TABLE) + ), + (path.name,), + ) + return path.name + except Exception: + logger.exception("Failed to apply migration '%s'", path.name) + raise + + def _migrations_table_exists(self, cursor, schema_name: str) -> bool: + cursor.execute( + """ + SELECT 1 + FROM information_schema.tables + WHERE table_schema = %s AND table_name = %s + """, + (schema_name, MIGRATIONS_TABLE), + ) + return cursor.fetchone() is not None + + def _create_migrations_table(self, cursor, schema_name: str) -> None: + cursor.execute( + sql.SQL( + "CREATE TABLE IF NOT EXISTS {} (" + "filename TEXT PRIMARY KEY," + "applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()" + ")" + ).format( + sql.Identifier(schema_name, MIGRATIONS_TABLE) + ) + ) + + def _fetch_applied_migrations(self, cursor, schema_name: str) -> set[str]: + cursor.execute( + sql.SQL("SELECT filename FROM {} ORDER BY filename").format( + sql.Identifier(schema_name, MIGRATIONS_TABLE) + ) + ) + return {row[0] for row in cursor.fetchall()} + + def seed_baseline_data(self, *, dry_run: bool) -> None: + """Seed reference data such as currencies.""" + + from scripts import seed_data + + seed_args = argparse.Namespace( + currencies=True, + units=True, + defaults=False, + dry_run=dry_run, + verbose=0, + ) + seed_data.run_with_namespace(seed_args, config=self.config) + + if dry_run: + logger.info("Dry run: skipped seed verification") + return + + expected_currencies = { + code for code, *_ in getattr(seed_data, "CURRENCY_SEEDS", ()) + } + expected_units = { + code + for code, *_ in getattr(seed_data, "MEASUREMENT_UNIT_SEEDS", ()) + } + self._verify_seeded_data( + expected_currency_codes=expected_currencies, + expected_unit_codes=expected_units, + ) + + def _verify_seeded_data( + self, + *, + expected_currency_codes: set[str], + expected_unit_codes: set[str], + ) -> None: + if not expected_currency_codes and not 
expected_unit_codes: + logger.info("No seed datasets configured for verification") + return + + with self._application_connection() as conn: + with conn.cursor() as cursor: + if expected_currency_codes: + cursor.execute( + "SELECT code, is_active FROM currency WHERE code = ANY(%s)", + (list(expected_currency_codes),), + ) + rows = cursor.fetchall() + found_codes = {row[0] for row in rows} + missing_codes = sorted( + expected_currency_codes - found_codes + ) + if missing_codes: + message = ( + "Missing expected currencies after seeding: %s. " + "Run scripts/seed_data.py --currencies to restore them." + ) % ", ".join(missing_codes) + logger.error(message) + raise RuntimeError(message) + + logger.info( + "Verified %d seeded currencies present", + len(found_codes), + ) + + default_status = next( + (row[1] for row in rows if row[0] == "USD"), None + ) + if default_status is False: + message = ( + "Default currency 'USD' is inactive after seeding. " + "Reactivate it or rerun the seeding command." + ) + logger.error(message) + raise RuntimeError(message) + elif default_status is None: + message = ( + "Default currency 'USD' not found after seeding. " + "Ensure baseline migration 000_base.sql ran successfully." + ) + logger.error(message) + raise RuntimeError(message) + else: + logger.info("Verified default currency 'USD' active") + + if expected_unit_codes: + try: + cursor.execute( + "SELECT code, is_active FROM measurement_unit WHERE code = ANY(%s)", + (list(expected_unit_codes),), + ) + except errors.UndefinedTable: + conn.rollback() + message = ( + "measurement_unit table not found during seed verification. " + "Ensure baseline migration 000_base.sql has been applied." + ) + logger.error(message) + raise RuntimeError(message) + else: + rows = cursor.fetchall() + found_units = {row[0] for row in rows} + missing_units = sorted( + expected_unit_codes - found_units + ) + if missing_units: + message = ( + "Missing expected measurement units after seeding: %s. " + "Run scripts/seed_data.py --units to restore them." + ) % ", ".join(missing_units) + logger.error(message) + raise RuntimeError(message) + + inactive_units = sorted( + row[0] for row in rows if not bool(row[1]) + ) + if inactive_units: + message = ( + "Measurement units inactive after seeding: %s. " + "Reactivate them or rerun unit seeding." 
+ ) % ", ".join(inactive_units) + logger.error(message) + raise RuntimeError(message) + + logger.info( + "Verified %d measurement units present", + len(found_units), + ) + + logger.info("Seed verification complete") + + def _drop_database(self, database: str) -> None: + logger.warning("Rollback: dropping database '%s'", database) + with self._admin_connection(self.config.admin_database) as conn: + conn.autocommit = True + with conn.cursor() as cursor: + cursor.execute( + "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = %s", + (database,), + ) + cursor.execute( + sql.SQL("DROP DATABASE IF EXISTS {}" ).format( + sql.Identifier(database) + ) + ) + + def _drop_role(self, role: str) -> None: + logger.warning("Rollback: dropping role '%s'", role) + with self._admin_connection(self.config.admin_database) as conn: + conn.autocommit = True + with conn.cursor() as cursor: + cursor.execute( + sql.SQL("DROP ROLE IF EXISTS {}" ).format( + sql.Identifier(role) + ) + ) + + def _revoke_role_privileges(self, *, schema_name: str) -> None: + logger.warning( + "Rollback: revoking privileges on schema '%s' for role '%s'", + schema_name, + self.config.user, + ) + with self._admin_connection(self.config.database) as conn: + conn.autocommit = True + with conn.cursor() as cursor: + cursor.execute( + sql.SQL("REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} FROM {}" ).format( + sql.Identifier(schema_name), + sql.Identifier(self.config.user) + ) + ) + cursor.execute( + sql.SQL("REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} FROM {}" ).format( + sql.Identifier(schema_name), + sql.Identifier(self.config.user) + ) + ) + cursor.execute( + sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM {}" ).format( + sql.Identifier(schema_name), + sql.Identifier(self.config.user) + ) + ) + cursor.execute( + sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE USAGE, SELECT ON SEQUENCES FROM {}" ).format( + sql.Identifier(schema_name), + sql.Identifier(self.config.user) + ) + ) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Bootstrap CalMiner database") + parser.add_argument( + "--ensure-database", + action="store_true", + help="Create the application database when it does not already exist.", + ) + parser.add_argument( + "--ensure-role", + action="store_true", + help="Create the application role and grant necessary privileges.", + ) + parser.add_argument( + "--ensure-schema", + action="store_true", + help="Create the configured schema if it does not exist.", + ) + parser.add_argument( + "--initialize-schema", + action="store_true", + help="Create missing tables based on SQLAlchemy models.", + ) + parser.add_argument( + "--run-migrations", + action="store_true", + help="Execute pending SQL migrations.", + ) + parser.add_argument( + "--seed-data", + action="store_true", + help="Seed baseline reference data (currencies, etc.).", + ) + parser.add_argument( + "--migrations-dir", + default=None, + help="Override the default migrations directory.", + ) + parser.add_argument("--db-driver", help="Override DATABASE_DRIVER") + parser.add_argument("--db-host", help="Override DATABASE_HOST") + parser.add_argument("--db-port", type=int, + help="Override DATABASE_PORT") + parser.add_argument("--db-name", help="Override DATABASE_NAME") + parser.add_argument("--db-user", help="Override DATABASE_USER") + parser.add_argument( + "--db-password", help="Override DATABASE_PASSWORD") + parser.add_argument("--db-schema", help="Override 
DATABASE_SCHEMA") + parser.add_argument( + "--admin-url", + help="Override DATABASE_ADMIN_URL for administrative operations", + ) + parser.add_argument( + "--admin-user", help="Override DATABASE_SUPERUSER for admin ops") + parser.add_argument( + "--admin-password", + help="Override DATABASE_SUPERUSER_PASSWORD for admin ops", + ) + parser.add_argument( + "--admin-db", + help="Override DATABASE_SUPERUSER_DB for admin ops", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Log actions without applying changes.", + ) + parser.add_argument( + "--verbose", "-v", action="count", default=0, help="Increase logging verbosity" + ) + return parser.parse_args() + + +def main() -> None: + args = parse_args() + level = logging.WARNING - (10 * min(args.verbose, 2)) + logging.basicConfig(level=max(level, logging.INFO), + format="%(levelname)s %(message)s") + + override_args: dict[str, Optional[str]] = { + "DATABASE_DRIVER": args.db_driver, + "DATABASE_HOST": args.db_host, + "DATABASE_NAME": args.db_name, + "DATABASE_USER": args.db_user, + "DATABASE_PASSWORD": args.db_password, + "DATABASE_SCHEMA": args.db_schema, + "DATABASE_ADMIN_URL": args.admin_url, + "DATABASE_SUPERUSER": args.admin_user, + "DATABASE_SUPERUSER_PASSWORD": args.admin_password, + "DATABASE_SUPERUSER_DB": args.admin_db, + } + if args.db_port is not None: + override_args["DATABASE_PORT"] = str(args.db_port) + + config = DatabaseConfig.from_env(overrides=override_args) + setup = DatabaseSetup(config, dry_run=args.dry_run) + + admin_tasks_requested = args.ensure_database or args.ensure_role or args.ensure_schema + application_tasks_requested = args.initialize_schema or args.run_migrations + + if admin_tasks_requested: + setup.validate_admin_connection() + + app_validated = False + + try: + if args.ensure_database: + setup.ensure_database() + if args.ensure_role: + setup.ensure_role() + if args.ensure_schema: + setup.ensure_schema() + + if args.initialize_schema: + if not app_validated and application_tasks_requested: + setup.validate_application_connection() + app_validated = True + setup.initialize_schema() + if args.run_migrations: + if not app_validated and application_tasks_requested: + setup.validate_application_connection() + app_validated = True + migrations_path = Path( + args.migrations_dir) if args.migrations_dir else None + setup.run_migrations(migrations_path) + if args.seed_data: + if not app_validated: + setup.validate_application_connection() + app_validated = True + setup.seed_baseline_data(dry_run=args.dry_run) + except Exception: + if not setup.dry_run: + setup.execute_rollbacks() + raise + finally: + if not setup.dry_run: + setup.clear_rollbacks() + + +if __name__ == "__main__": + main() diff --git a/static/js/currencies.js b/static/js/currencies.js new file mode 100644 index 0000000..86557f7 --- /dev/null +++ b/static/js/currencies.js @@ -0,0 +1,537 @@ +document.addEventListener("DOMContentLoaded", () => { + const dataElement = document.getElementById("currencies-data"); + const editorSection = document.getElementById("currencies-editor"); + const tableBody = document.getElementById("currencies-table-body"); + const tableEmptyState = document.getElementById("currencies-table-empty"); + const metrics = { + total: document.getElementById("currency-metric-total"), + active: document.getElementById("currency-metric-active"), + inactive: document.getElementById("currency-metric-inactive"), + }; + + const form = document.getElementById("currency-form"); + const existingSelect = 
document.getElementById("currency-form-existing"); + const codeInput = document.getElementById("currency-form-code"); + const nameInput = document.getElementById("currency-form-name"); + const symbolInput = document.getElementById("currency-form-symbol"); + const statusSelect = document.getElementById("currency-form-status"); + const resetButton = document.getElementById("currency-form-reset"); + const feedbackElement = document.getElementById("currency-form-feedback"); + + const saveButton = form ? form.querySelector("button[type='submit']") : null; + + const uppercaseCode = (value) => + (value || "").toString().trim().toUpperCase(); + const normalizeSymbol = (value) => { + if (value === undefined || value === null) { + return null; + } + const trimmed = String(value).trim(); + return trimmed ? trimmed : null; + }; + + const normalizeApiBase = (value) => { + if (!value || typeof value !== "string") { + return "/api/currencies"; + } + return value.endsWith("/") ? value.slice(0, -1) : value; + }; + + let currencies = []; + let apiBase = "/api/currencies"; + let defaultCurrencyCode = "USD"; + + const buildCurrencyRecord = (record) => { + if (!record || typeof record !== "object") { + return null; + } + const code = uppercaseCode(record.code); + return { + id: record.id ?? null, + code, + name: record.name || "", + symbol: record.symbol || "", + is_active: Boolean(record.is_active), + is_default: code === defaultCurrencyCode, + }; + }; + + const findCurrencyIndex = (code) => { + return currencies.findIndex((item) => item.code === code); + }; + + const upsertCurrency = (record) => { + const normalized = buildCurrencyRecord(record); + if (!normalized) { + return null; + } + const existingIndex = findCurrencyIndex(normalized.code); + if (existingIndex >= 0) { + currencies[existingIndex] = normalized; + } else { + currencies.push(normalized); + } + currencies.sort((a, b) => a.code.localeCompare(b.code)); + return normalized; + }; + + const replaceCurrencyList = (records) => { + if (!Array.isArray(records)) { + return; + } + currencies = records + .map((record) => buildCurrencyRecord(record)) + .filter((record) => record !== null) + .sort((a, b) => a.code.localeCompare(b.code)); + }; + + const applyPayload = () => { + if (!dataElement) { + return; + } + try { + const parsed = JSON.parse(dataElement.textContent || "{}"); + if (parsed && typeof parsed === "object") { + if (parsed.default_currency_code) { + defaultCurrencyCode = uppercaseCode(parsed.default_currency_code); + } + if (parsed.currency_api_base) { + apiBase = normalizeApiBase(parsed.currency_api_base); + } + if (Array.isArray(parsed.currencies)) { + replaceCurrencyList(parsed.currencies); + } + } + } catch (error) { + console.error("Unable to parse currencies payload", error); + } + }; + + const showFeedback = (message, type = "success") => { + if (!feedbackElement) { + return; + } + feedbackElement.textContent = message; + feedbackElement.classList.remove("hidden", "success", "error"); + feedbackElement.classList.add(type); + }; + + const hideFeedback = () => { + if (!feedbackElement) { + return; + } + feedbackElement.classList.add("hidden"); + feedbackElement.classList.remove("success", "error"); + feedbackElement.textContent = ""; + }; + + const setButtonLoading = (button, isLoading) => { + if (!button) { + return; + } + button.disabled = isLoading; + button.classList.toggle("is-loading", isLoading); + }; + + const updateMetrics = () => { + const total = currencies.length; + const active = currencies.filter((item) => 
item.is_active).length; + const inactive = total - active; + if (metrics.total) { + metrics.total.textContent = String(total); + } + if (metrics.active) { + metrics.active.textContent = String(active); + } + if (metrics.inactive) { + metrics.inactive.textContent = String(inactive); + } + }; + + const renderExistingOptions = ( + selectedCode = existingSelect ? existingSelect.value : "" + ) => { + if (!existingSelect) { + return; + } + const placeholder = existingSelect.querySelector("option[value='']"); + const placeholderClone = placeholder ? placeholder.cloneNode(true) : null; + existingSelect.innerHTML = ""; + if (placeholderClone) { + existingSelect.appendChild(placeholderClone); + } + const fragment = document.createDocumentFragment(); + currencies.forEach((currency) => { + const option = document.createElement("option"); + option.value = currency.code; + option.textContent = currency.name + ? `${currency.name} (${currency.code})` + : currency.code; + if (selectedCode === currency.code) { + option.selected = true; + } + fragment.appendChild(option); + }); + existingSelect.appendChild(fragment); + if ( + selectedCode && + !currencies.some((item) => item.code === selectedCode) + ) { + existingSelect.value = ""; + } + }; + + const renderTable = () => { + if (!tableBody) { + return; + } + tableBody.innerHTML = ""; + if (!currencies.length) { + if (tableEmptyState) { + tableEmptyState.classList.remove("hidden"); + } + return; + } + if (tableEmptyState) { + tableEmptyState.classList.add("hidden"); + } + const fragment = document.createDocumentFragment(); + currencies.forEach((currency) => { + const row = document.createElement("tr"); + + const codeCell = document.createElement("td"); + codeCell.textContent = currency.code; + row.appendChild(codeCell); + + const nameCell = document.createElement("td"); + nameCell.textContent = currency.name || "—"; + row.appendChild(nameCell); + + const symbolCell = document.createElement("td"); + symbolCell.textContent = currency.symbol || "—"; + row.appendChild(symbolCell); + + const statusCell = document.createElement("td"); + statusCell.textContent = currency.is_active ? "Active" : "Inactive"; + if (currency.is_default) { + statusCell.textContent += " (Default)"; + } + row.appendChild(statusCell); + + const actionsCell = document.createElement("td"); + const editButton = document.createElement("button"); + editButton.type = "button"; + editButton.className = "btn"; + editButton.dataset.action = "edit"; + editButton.dataset.code = currency.code; + editButton.textContent = "Edit"; + editButton.style.marginRight = "0.5rem"; + + const toggleButton = document.createElement("button"); + toggleButton.type = "button"; + toggleButton.className = "btn"; + toggleButton.dataset.action = "toggle"; + toggleButton.dataset.code = currency.code; + toggleButton.textContent = currency.is_active ? 
"Deactivate" : "Activate"; + if (currency.is_default && currency.is_active) { + toggleButton.disabled = true; + toggleButton.title = "The default currency must remain active."; + } + + actionsCell.appendChild(editButton); + actionsCell.appendChild(toggleButton); + + row.appendChild(actionsCell); + fragment.appendChild(row); + }); + tableBody.appendChild(fragment); + }; + + const refreshUI = (selectedCode) => { + currencies.sort((a, b) => a.code.localeCompare(b.code)); + renderTable(); + renderExistingOptions(selectedCode); + updateMetrics(); + }; + + const findCurrency = (code) => + currencies.find((item) => item.code === code) || null; + + const setFormForCurrency = (currency) => { + if (!form || !codeInput || !nameInput || !symbolInput || !statusSelect) { + return; + } + if (!currency) { + form.reset(); + if (existingSelect) { + existingSelect.value = ""; + } + codeInput.readOnly = false; + codeInput.value = ""; + nameInput.value = ""; + symbolInput.value = ""; + statusSelect.disabled = false; + statusSelect.value = "true"; + statusSelect.title = ""; + return; + } + + if (existingSelect) { + existingSelect.value = currency.code; + } + codeInput.readOnly = true; + codeInput.value = currency.code; + nameInput.value = currency.name || ""; + symbolInput.value = currency.symbol || ""; + statusSelect.value = currency.is_active ? "true" : "false"; + if (currency.is_default) { + statusSelect.disabled = true; + statusSelect.value = "true"; + statusSelect.title = "The default currency must remain active."; + } else { + statusSelect.disabled = false; + statusSelect.title = ""; + } + }; + + const resetFormState = () => { + setFormForCurrency(null); + }; + + const parseError = async (response, fallbackMessage) => { + try { + const detail = await response.json(); + if (detail && typeof detail === "object" && detail.detail) { + return detail.detail; + } + } catch (error) { + // ignore JSON parse errors + } + return fallbackMessage; + }; + + const fetchCurrenciesFromApi = async () => { + const url = `${apiBase}/?include_inactive=true`; + try { + const response = await fetch(url); + if (!response.ok) { + return; + } + const list = await response.json(); + if (Array.isArray(list)) { + replaceCurrencyList(list); + refreshUI(existingSelect ? existingSelect.value : undefined); + } + } catch (error) { + console.warn("Unable to refresh currency list", error); + } + }; + + const handleSubmit = async (event) => { + event.preventDefault(); + hideFeedback(); + if (!form || !codeInput || !nameInput || !statusSelect) { + return; + } + + const editingCode = existingSelect + ? uppercaseCode(existingSelect.value) + : ""; + const codeValue = uppercaseCode(codeInput.value); + const nameValue = (nameInput.value || "").trim(); + const symbolValue = normalizeSymbol(symbolInput ? symbolInput.value : ""); + const isActive = statusSelect.value !== "false"; + + if (!nameValue) { + showFeedback("Provide a currency name.", "error"); + return; + } + + if (!editingCode) { + if (!codeValue || codeValue.length !== 3) { + showFeedback("Provide a three-letter currency code.", "error"); + return; + } + } + + const payload = editingCode + ? { + name: nameValue, + symbol: symbolValue, + is_active: isActive, + } + : { + code: codeValue, + name: nameValue, + symbol: symbolValue, + is_active: isActive, + }; + + const targetCode = editingCode || codeValue; + const url = editingCode + ? 
`${apiBase}/${encodeURIComponent(editingCode)}` + : `${apiBase}/`; + + setButtonLoading(saveButton, true); + try { + const response = await fetch(url, { + method: editingCode ? "PUT" : "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload), + }); + + if (!response.ok) { + const message = await parseError( + response, + editingCode + ? "Unable to update the currency." + : "Unable to create the currency." + ); + throw new Error(message); + } + + const result = await response.json(); + const updated = upsertCurrency(result); + defaultCurrencyCode = uppercaseCode(defaultCurrencyCode); + refreshUI(updated ? updated.code : targetCode); + + if (editingCode) { + showFeedback("Currency updated successfully."); + if (updated) { + setFormForCurrency(updated); + } + } else { + showFeedback("Currency created successfully."); + resetFormState(); + } + } catch (error) { + showFeedback(error.message || "An unexpected error occurred.", "error"); + } finally { + setButtonLoading(saveButton, false); + } + }; + + const handleToggle = async (code, button) => { + const record = findCurrency(code); + if (!record) { + return; + } + hideFeedback(); + const nextState = !record.is_active; + const url = `${apiBase}/${encodeURIComponent(code)}/activation`; + setButtonLoading(button, true); + try { + const response = await fetch(url, { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ is_active: nextState }), + }); + + if (!response.ok) { + const message = await parseError( + response, + nextState + ? "Unable to activate the currency." + : "Unable to deactivate the currency." + ); + throw new Error(message); + } + + const result = await response.json(); + const updated = upsertCurrency(result); + refreshUI(updated ? updated.code : code); + if (existingSelect && existingSelect.value === code && updated) { + setFormForCurrency(updated); + } + const actionMessage = nextState + ? `Currency ${code} activated.` + : `Currency ${code} deactivated.`; + showFeedback(actionMessage); + } catch (error) { + showFeedback(error.message || "An unexpected error occurred.", "error"); + } finally { + setButtonLoading(button, false); + } + }; + + const handleTableClick = (event) => { + const button = event.target.closest("button[data-action]"); + if (!button) { + return; + } + const code = uppercaseCode(button.dataset.code); + const action = button.dataset.action; + if (!code || !action) { + return; + } + if (action === "edit") { + const currency = findCurrency(code); + if (currency) { + setFormForCurrency(currency); + hideFeedback(); + if (nameInput) { + nameInput.focus(); + } + } + } else if (action === "toggle") { + handleToggle(code, button); + } + }; + + applyPayload(); + if (editorSection && editorSection.dataset.defaultCode) { + defaultCurrencyCode = uppercaseCode(editorSection.dataset.defaultCode); + currencies = currencies.map((record) => { + return record + ? 
{ + ...record, + is_default: record.code === defaultCurrencyCode, + } + : record; + }); + } + apiBase = normalizeApiBase(apiBase); + + refreshUI(); + + if (form) { + form.addEventListener("submit", handleSubmit); + } + + if (existingSelect) { + existingSelect.addEventListener("change", (event) => { + const selectedCode = uppercaseCode(event.target.value); + if (!selectedCode) { + hideFeedback(); + resetFormState(); + return; + } + const currency = findCurrency(selectedCode); + if (currency) { + setFormForCurrency(currency); + hideFeedback(); + } + }); + } + + if (resetButton) { + resetButton.addEventListener("click", (event) => { + event.preventDefault(); + hideFeedback(); + resetFormState(); + }); + } + + if (codeInput) { + codeInput.addEventListener("input", () => { + const value = uppercaseCode(codeInput.value).slice(0, 3); + codeInput.value = value; + }); + } + + if (tableBody) { + tableBody.addEventListener("click", handleTableClick); + } + + fetchCurrenciesFromApi(); +}); diff --git a/templates/currencies.html b/templates/currencies.html new file mode 100644 index 0000000..6c99515 --- /dev/null +++ b/templates/currencies.html @@ -0,0 +1,131 @@ +{% extends "base.html" %} +{% from "partials/components.html" import select_field, feedback, empty_state, table_container with context %} + +{% block title %}Currencies · CalMiner{% endblock %} + +{% block content %} +
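+<!-- Hypothetical sketch (the variable name currencies_payload is an
+     assumption, not part of this change): static/js/currencies.js reads a
+     JSON data island with id "currencies-data" exposing
+     default_currency_code, currency_api_base, and the initial currencies
+     list. Assuming the view passes such a mapping, the island could be
+     rendered as:
+     {% raw %}
+     <script id="currencies-data" type="application/json">
+       {{ currencies_payload | tojson }}
+     </script>
+     {% endraw %}
+-->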
+<section id="currencies-overview">
+  <h2>Currency Overview</h2>
+  <p>Current availability of currencies for project inputs.</p>
+
+  {% if currency_stats %}
+  <div class="metrics">
+    <div class="metric">
+      <span class="metric-label">Total Currencies</span>
+      <span class="metric-value" id="currency-metric-total">{{ currency_stats.total }}</span>
+    </div>
+    <div class="metric">
+      <span class="metric-label">Active</span>
+      <span class="metric-value" id="currency-metric-active">{{ currency_stats.active }}</span>
+    </div>
+    <div class="metric">
+      <span class="metric-label">Inactive</span>
+      <span class="metric-value" id="currency-metric-inactive">{{ currency_stats.inactive }}</span>
+    </div>
+  </div>
+  {% else %}
+  {{ empty_state("currencies-overview-empty", "No currency data available yet.") }}
+  {% endif %}
+
+  {% call table_container(
+    "currencies-table-container",
+    aria_label="Configured currencies",
+    heading="Configured Currencies"
+  ) %}
+  <thead>
+    <tr>
+      <th scope="col">Code</th>
+      <th scope="col">Name</th>
+      <th scope="col">Symbol</th>
+      <th scope="col">Status</th>
+      <th scope="col">Actions</th>
+    </tr>
+  </thead>
+  <tbody id="currencies-table-body"></tbody>
+  {% endcall %}
+  {{ empty_state(
+    "currencies-table-empty",
+    "No currencies configured yet.",
+    hidden=currencies|length > 0
+  ) }}
+</section>
+
+<section id="currencies-editor" data-default-code="{{ default_currency_code }}">
+  <h2>Manage Currencies</h2>
+  <p>Create new currencies or update existing configurations inline.</p>
+
+  {% set status_options = [
+    {"id": "true", "name": "Active"},
+    {"id": "false", "name": "Inactive"}
+  ] %}
+
+  <form id="currency-form">
+    {{ select_field(
+      "Currency to update (leave blank for new)",
+      "currency-form-existing",
+      name="existing_code",
+      options=currencies,
+      placeholder="Create a new currency",
+      value_attr="code",
+      label_attr="name"
+    ) }}
+
+    <label for="currency-form-code">Code</label>
+    <input type="text" id="currency-form-code" name="code" maxlength="3" autocomplete="off" />
+
+    <label for="currency-form-name">Name</label>
+    <input type="text" id="currency-form-name" name="name" autocomplete="off" />
+
+    <label for="currency-form-symbol">Symbol</label>
+    <input type="text" id="currency-form-symbol" name="symbol" autocomplete="off" />
+
+    {{ select_field(
+      "Status",
+      "currency-form-status",
+      name="is_active",
+      options=status_options,
+      include_blank=False
+    ) }}
+
+    <button type="submit" class="btn">Save Currency</button>
+    <button type="button" class="btn" id="currency-form-reset">Reset</button>
+  </form>
+
+  {{ feedback("currency-form-feedback") }}
+</section>
+{% endblock %} {% block scripts %} {{ super() }} + + +{% endblock %} diff --git a/templates/partials/base_footer.html b/templates/partials/base_footer.html index 1d204e0..de97869 100644 --- a/templates/partials/base_footer.html +++ b/templates/partials/base_footer.html @@ -1,5 +1,8 @@ diff --git a/templates/partials/base_header.html b/templates/partials/base_header.html index 1140a18..a8d67e2 100644 --- a/templates/partials/base_header.html +++ b/templates/partials/base_header.html @@ -2,6 +2,7 @@ ("/", "Dashboard"), ("/ui/scenarios", "Scenarios"), ("/ui/parameters", "Parameters"), + ("/ui/currencies", "Currencies"), ("/ui/costs", "Costs"), ("/ui/consumption", "Consumption"), ("/ui/production", "Production"), diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index f3a9d4b..011d6c7 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -64,6 +64,40 @@ def live_server() -> Generator[str, None, None]: process.wait(timeout=5) +@pytest.fixture(scope="session", autouse=True) +def seed_default_currencies(live_server: str) -> None: + """Ensure a baseline set of currencies exists for UI flows.""" + + seeds = [ + {"code": "EUR", "name": "Euro", "symbol": "EUR", "is_active": True}, + {"code": "CLP", "name": "Chilean Peso", "symbol": "CLP$", "is_active": True}, + ] + + with httpx.Client(base_url=live_server, timeout=5.0) as client: + try: + response = client.get("/api/currencies/?include_inactive=true") + response.raise_for_status() + existing_codes = { + str(item.get("code")) + for item in response.json() + if isinstance(item, dict) and item.get("code") + } + except httpx.HTTPError as exc: # noqa: BLE001 + raise RuntimeError("Failed to read existing currencies") from exc + + for payload in seeds: + if payload["code"] in existing_codes: + continue + try: + create_response = client.post("/api/currencies/", json=payload) + except httpx.HTTPError as exc: # noqa: BLE001 + raise RuntimeError("Failed to seed currencies") from exc + + if create_response.status_code == 409: + continue + create_response.raise_for_status() + + @pytest.fixture(scope="session") def playwright_instance() -> Generator[Playwright, None, None]: """Provide a Playwright instance for the test session.""" diff --git a/tests/e2e/test_currencies.py b/tests/e2e/test_currencies.py new file mode 100644 index 0000000..b467ad1 --- /dev/null +++ b/tests/e2e/test_currencies.py @@ -0,0 +1,130 @@ +import random +import string + +from playwright.sync_api import Page, expect + + +def _unique_currency_code(existing: set[str]) -> str: + """Generate a unique three-letter code not present in *existing*.""" + alphabet = string.ascii_uppercase + for _ in range(100): + candidate = "".join(random.choices(alphabet, k=3)) + if candidate not in existing and candidate != "USD": + return candidate + raise AssertionError( + "Unable to generate a unique currency code for the test run.") + + +def _metric_value(page: Page, element_id: str) -> int: + locator = page.locator(f"#{element_id}") + expect(locator).to_be_visible() + return int(locator.inner_text().strip()) + + +def _expect_feedback(page: Page, expected_text: str) -> None: + page.wait_for_function( + "expected => {" + " const el = document.getElementById('currency-form-feedback');" + " if (!el) return false;" + " const text = (el.textContent || '').trim();" + " return !el.classList.contains('hidden') && text === expected;" + "}", + arg=expected_text, + ) + feedback = page.locator("#currency-form-feedback") + expect(feedback).to_have_text(expected_text) + + +def 
test_currency_workflow_create_update_toggle(page: Page) -> None: + """Exercise create, update, and toggle flows on the currency settings page.""" + page.goto("/ui/currencies") + expect(page).to_have_title("Currencies · CalMiner") + expect(page.locator("h2:has-text('Currency Overview')")).to_be_visible() + + code_cells = page.locator("#currencies-table-body tr td:nth-child(1)") + existing_codes = {text.strip().upper() + for text in code_cells.all_inner_texts()} + + total_before = _metric_value(page, "currency-metric-total") + active_before = _metric_value(page, "currency-metric-active") + inactive_before = _metric_value(page, "currency-metric-inactive") + + new_code = _unique_currency_code(existing_codes) + new_name = f"Test Currency {new_code}" + new_symbol = new_code[0] + + page.fill("#currency-form-code", new_code) + page.fill("#currency-form-name", new_name) + page.fill("#currency-form-symbol", new_symbol) + page.select_option("#currency-form-status", "true") + + with page.expect_response("**/api/currencies/") as create_info: + page.click("button[type='submit']") + create_response = create_info.value + assert create_response.status == 201 + + _expect_feedback(page, "Currency created successfully.") + + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-total').textContent.trim()) === expected", + arg=total_before + 1, + ) + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected", + arg=active_before + 1, + ) + + row = page.locator("#currencies-table-body tr").filter(has_text=new_code) + expect(row).to_be_visible() + expect(row.locator("td").nth(3)).to_have_text("Active") + + # Switch to update mode using the existing currency option. + page.select_option("#currency-form-existing", new_code) + updated_name = f"{new_name} Updated" + updated_symbol = f"{new_symbol}$" + page.fill("#currency-form-name", updated_name) + page.fill("#currency-form-symbol", updated_symbol) + page.select_option("#currency-form-status", "false") + + with page.expect_response(f"**/api/currencies/{new_code}") as update_info: + page.click("button[type='submit']") + update_response = update_info.value + assert update_response.status == 200 + + _expect_feedback(page, "Currency updated successfully.") + + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected", + arg=active_before, + ) + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-inactive').textContent.trim()) === expected", + arg=inactive_before + 1, + ) + + expect(row.locator("td").nth(1)).to_have_text(updated_name) + expect(row.locator("td").nth(2)).to_have_text(updated_symbol) + expect(row.locator("td").nth(3)).to_contain_text("Inactive") + + toggle_button = row.locator("button[data-action='toggle']") + expect(toggle_button).to_have_text("Activate") + + with page.expect_response(f"**/api/currencies/{new_code}/activation") as toggle_info: + toggle_button.click() + toggle_response = toggle_info.value + assert toggle_response.status == 200 + + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected", + arg=active_before + 1, + ) + page.wait_for_function( + "expected => Number(document.getElementById('currency-metric-inactive').textContent.trim()) === expected", + arg=inactive_before, + ) + + _expect_feedback(page, f"Currency {new_code} activated.") + + 
expect(row.locator("td").nth(3)).to_contain_text("Active") + expect(row.locator("button[data-action='toggle']") + ).to_have_text("Deactivate") diff --git a/tests/e2e/test_smoke.py b/tests/e2e/test_smoke.py index a601dcc..01c0f18 100644 --- a/tests/e2e/test_smoke.py +++ b/tests/e2e/test_smoke.py @@ -14,6 +14,7 @@ UI_ROUTES = [ ("/ui/maintenance", "Maintenance · CalMiner", "Maintenance Schedule"), ("/ui/simulations", "Simulations · CalMiner", "Monte Carlo Simulations"), ("/ui/reporting", "Reporting · CalMiner", "Scenario KPI Summary"), + ("/ui/currencies", "Currencies · CalMiner", "Currency Overview"), ] diff --git a/tests/unit/test_currencies.py b/tests/unit/test_currencies.py new file mode 100644 index 0000000..5aa674c --- /dev/null +++ b/tests/unit/test_currencies.py @@ -0,0 +1,101 @@ +from typing import Dict + +import pytest + +from models.currency import Currency + + +@pytest.fixture(autouse=True) +def _cleanup_currencies(db_session): + db_session.query(Currency).delete() + db_session.commit() + yield + db_session.query(Currency).delete() + db_session.commit() + + +def _assert_currency(payload: Dict[str, object], code: str, name: str, symbol: str | None, is_active: bool) -> None: + assert payload["code"] == code + assert payload["name"] == name + assert payload["is_active"] is is_active + if symbol is None: + assert payload["symbol"] is None + else: + assert payload["symbol"] == symbol + + +def test_list_returns_default_currency(api_client, db_session): + response = api_client.get("/api/currencies/") + assert response.status_code == 200 + data = response.json() + assert any(item["code"] == "USD" for item in data) + + +def test_create_currency_success(api_client, db_session): + payload = {"code": "EUR", "name": "Euro", "symbol": "€", "is_active": True} + response = api_client.post("/api/currencies/", json=payload) + assert response.status_code == 201 + data = response.json() + _assert_currency(data, "EUR", "Euro", "€", True) + + stored = db_session.query(Currency).filter_by(code="EUR").one() + assert stored.name == "Euro" + assert stored.symbol == "€" + assert stored.is_active is True + + +def test_create_currency_conflict(api_client, db_session): + api_client.post( + "/api/currencies/", + json={"code": "CAD", "name": "Canadian Dollar", + "symbol": "$", "is_active": True}, + ) + duplicate = api_client.post( + "/api/currencies/", + json={"code": "CAD", "name": "Canadian Dollar", + "symbol": "$", "is_active": True}, + ) + assert duplicate.status_code == 409 + + +def test_update_currency_fields(api_client, db_session): + api_client.post( + "/api/currencies/", + json={"code": "GBP", "name": "British Pound", + "symbol": "£", "is_active": True}, + ) + + response = api_client.put( + "/api/currencies/GBP", + json={"name": "Pound Sterling", "symbol": "£", "is_active": False}, + ) + assert response.status_code == 200 + data = response.json() + _assert_currency(data, "GBP", "Pound Sterling", "£", False) + + +def test_toggle_currency_activation(api_client, db_session): + api_client.post( + "/api/currencies/", + json={"code": "AUD", "name": "Australian Dollar", + "symbol": "A$", "is_active": True}, + ) + + response = api_client.patch( + "/api/currencies/AUD/activation", + json={"is_active": False}, + ) + assert response.status_code == 200 + data = response.json() + _assert_currency(data, "AUD", "Australian Dollar", "A$", False) + + +def test_default_currency_cannot_be_deactivated(api_client, db_session): + api_client.get("/api/currencies/") + response = api_client.patch( + 
"/api/currencies/USD/activation", + json={"is_active": False}, + ) + assert response.status_code == 400 + assert response.json()[ + "detail"] == "The default currency cannot be deactivated." diff --git a/tests/unit/test_setup_database.py b/tests/unit/test_setup_database.py new file mode 100644 index 0000000..c67e1ab --- /dev/null +++ b/tests/unit/test_setup_database.py @@ -0,0 +1,459 @@ +import argparse +from unittest import mock + +import psycopg2 +import pytest +from psycopg2 import errors as psycopg_errors + +import scripts.setup_database as setup_db_module + +from scripts import seed_data +from scripts.setup_database import DatabaseConfig, DatabaseSetup + + +@pytest.fixture() +def mock_config() -> DatabaseConfig: + return DatabaseConfig( + driver="postgresql", + host="localhost", + port=5432, + database="calminer_test", + user="calminer", + password="secret", + schema="public", + admin_user="postgres", + admin_password="secret", + ) + + +@pytest.fixture() +def setup_instance(mock_config: DatabaseConfig) -> DatabaseSetup: + return DatabaseSetup(mock_config, dry_run=True) + + +def test_seed_baseline_data_dry_run_skips_verification(setup_instance: DatabaseSetup) -> None: + with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object( + setup_instance, "_verify_seeded_data" + ) as verify_mock: + setup_instance.seed_baseline_data(dry_run=True) + + seed_run.assert_called_once() + namespace_arg = seed_run.call_args[0][0] + assert isinstance(namespace_arg, argparse.Namespace) + assert namespace_arg.dry_run is True + assert namespace_arg.currencies is True + assert namespace_arg.units is True + assert seed_run.call_args.kwargs["config"] is setup_instance.config + verify_mock.assert_not_called() + + +def test_seed_baseline_data_invokes_verification(setup_instance: DatabaseSetup) -> None: + expected_currencies = {code for code, *_ in seed_data.CURRENCY_SEEDS} + expected_units = {code for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS} + + with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object( + setup_instance, "_verify_seeded_data" + ) as verify_mock: + setup_instance.seed_baseline_data(dry_run=False) + + seed_run.assert_called_once() + namespace_arg = seed_run.call_args[0][0] + assert isinstance(namespace_arg, argparse.Namespace) + assert namespace_arg.dry_run is False + assert seed_run.call_args.kwargs["config"] is setup_instance.config + verify_mock.assert_called_once_with( + expected_currency_codes=expected_currencies, + expected_unit_codes=expected_units, + ) + + +def test_run_migrations_applies_baseline_when_missing(mock_config: DatabaseConfig, tmp_path) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + + baseline = tmp_path / "000_base.sql" + baseline.write_text("SELECT 1;", encoding="utf-8") + other_migration = tmp_path / "20251022_add_other.sql" + other_migration.write_text("SELECT 2;", encoding="utf-8") + + migration_calls: list[str] = [] + + def capture_migration(cursor, schema_name: str, path): + migration_calls.append(path.name) + return path.name + + connection_mock = mock.MagicMock() + connection_mock.__enter__.return_value = connection_mock + cursor_context = mock.MagicMock() + cursor_mock = mock.MagicMock() + cursor_context.__enter__.return_value = cursor_mock + connection_mock.cursor.return_value = cursor_context + + with mock.patch.object( + setup_instance, "_application_connection", return_value=connection_mock + ), mock.patch.object( + setup_instance, "_migrations_table_exists", return_value=True + 
), mock.patch.object( + setup_instance, "_fetch_applied_migrations", return_value=set() + ), mock.patch.object( + setup_instance, "_apply_migration_file", side_effect=capture_migration + ) as apply_mock: + setup_instance.run_migrations(tmp_path) + + assert apply_mock.call_count == 1 + assert migration_calls == ["000_base.sql"] + legacy_marked = any( + call.args[1] == ("20251022_add_other.sql",) + for call in cursor_mock.execute.call_args_list + if len(call.args) == 2 + ) + assert legacy_marked + + +def test_run_migrations_noop_when_all_files_already_applied( + mock_config: DatabaseConfig, tmp_path +) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + + baseline = tmp_path / "000_base.sql" + baseline.write_text("SELECT 1;", encoding="utf-8") + other_migration = tmp_path / "20251022_add_other.sql" + other_migration.write_text("SELECT 2;", encoding="utf-8") + + connection_mock, cursor_mock = _connection_with_cursor() + + with mock.patch.object( + setup_instance, "_application_connection", return_value=connection_mock + ), mock.patch.object( + setup_instance, "_migrations_table_exists", return_value=True + ), mock.patch.object( + setup_instance, + "_fetch_applied_migrations", + return_value={"000_base.sql", "20251022_add_other.sql"}, + ), mock.patch.object( + setup_instance, "_apply_migration_file" + ) as apply_mock: + setup_instance.run_migrations(tmp_path) + + apply_mock.assert_not_called() + cursor_mock.execute.assert_not_called() + + +def _connection_with_cursor() -> tuple[mock.MagicMock, mock.MagicMock]: + connection_mock = mock.MagicMock() + connection_mock.__enter__.return_value = connection_mock + cursor_context = mock.MagicMock() + cursor_mock = mock.MagicMock() + cursor_context.__enter__.return_value = cursor_mock + connection_mock.cursor.return_value = cursor_context + return connection_mock, cursor_mock + + +def test_verify_seeded_data_raises_when_currency_missing(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + connection_mock, cursor_mock = _connection_with_cursor() + cursor_mock.fetchall.return_value = [("USD", True)] + + with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock): + with pytest.raises(RuntimeError) as exc: + setup_instance._verify_seeded_data( + expected_currency_codes={"USD", "EUR"}, + expected_unit_codes=set(), + ) + + assert "EUR" in str(exc.value) + + +def test_verify_seeded_data_raises_when_default_currency_inactive(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + connection_mock, cursor_mock = _connection_with_cursor() + cursor_mock.fetchall.return_value = [("USD", False)] + + with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock): + with pytest.raises(RuntimeError) as exc: + setup_instance._verify_seeded_data( + expected_currency_codes={"USD"}, + expected_unit_codes=set(), + ) + + assert "inactive" in str(exc.value) + + +def test_verify_seeded_data_raises_when_units_missing(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + connection_mock, cursor_mock = _connection_with_cursor() + cursor_mock.fetchall.return_value = [("tonnes", True)] + + with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock): + with pytest.raises(RuntimeError) as exc: + setup_instance._verify_seeded_data( + expected_currency_codes=set(), + expected_unit_codes={"tonnes", "liters"}, + ) + + assert 
"liters" in str(exc.value) + + +def test_verify_seeded_data_raises_when_measurement_table_missing(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + connection_mock, cursor_mock = _connection_with_cursor() + cursor_mock.execute.side_effect = psycopg_errors.UndefinedTable("relation does not exist") + + with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock): + with pytest.raises(RuntimeError) as exc: + setup_instance._verify_seeded_data( + expected_currency_codes=set(), + expected_unit_codes={"tonnes"}, + ) + + assert "measurement_unit" in str(exc.value) + connection_mock.rollback.assert_called_once() + + +def test_seed_baseline_data_rerun_uses_existing_records( + mock_config: DatabaseConfig, +) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + + connection_mock, cursor_mock = _connection_with_cursor() + + currency_rows = [(code, True) for code, *_ in seed_data.CURRENCY_SEEDS] + unit_rows = [(code, True) for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS] + + cursor_mock.fetchall.side_effect = [ + currency_rows, + unit_rows, + currency_rows, + unit_rows, + ] + + with mock.patch.object( + setup_instance, "_application_connection", return_value=connection_mock + ), mock.patch("scripts.seed_data.run_with_namespace") as seed_run: + setup_instance.seed_baseline_data(dry_run=False) + setup_instance.seed_baseline_data(dry_run=False) + + assert seed_run.call_count == 2 + first_namespace = seed_run.call_args_list[0].args[0] + assert isinstance(first_namespace, argparse.Namespace) + assert first_namespace.dry_run is False + assert seed_run.call_args_list[0].kwargs["config"] is setup_instance.config + assert cursor_mock.execute.call_count == 4 + + +def test_ensure_database_raises_with_context(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + connection_mock = mock.MagicMock() + cursor_mock = mock.MagicMock() + cursor_mock.fetchone.return_value = None + cursor_mock.execute.side_effect = [None, psycopg2.Error("create_fail")] + connection_mock.cursor.return_value = cursor_mock + + with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock): + with pytest.raises(RuntimeError) as exc: + setup_instance.ensure_database() + + assert "Failed to create database" in str(exc.value) + + +def test_ensure_role_raises_with_context_during_creation(mock_config: DatabaseConfig) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + + admin_conn, admin_cursor = _connection_with_cursor() + admin_cursor.fetchone.return_value = None + admin_cursor.execute.side_effect = [None, psycopg2.Error("role_fail")] + + with mock.patch.object( + setup_instance, + "_admin_connection", + side_effect=[admin_conn], + ): + with pytest.raises(RuntimeError) as exc: + setup_instance.ensure_role() + + assert "Failed to create role" in str(exc.value) + + +def test_ensure_role_raises_with_context_during_privilege_grants( + mock_config: DatabaseConfig, +) -> None: + setup_instance = DatabaseSetup(mock_config, dry_run=False) + + admin_conn, admin_cursor = _connection_with_cursor() + admin_cursor.fetchone.return_value = (1,) + + privilege_conn, privilege_cursor = _connection_with_cursor() + privilege_cursor.execute.side_effect = [psycopg2.Error("grant_fail")] + + with mock.patch.object( + setup_instance, + "_admin_connection", + side_effect=[admin_conn, privilege_conn], + ): + with pytest.raises(RuntimeError) as exc: + 
+
+
+def test_ensure_database_dry_run_skips_creation(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=True)
+
+    connection_mock = mock.MagicMock()
+    cursor_mock = mock.MagicMock()
+    cursor_mock.fetchone.return_value = None
+    connection_mock.cursor.return_value = cursor_mock
+
+    with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch(
+        "scripts.setup_database.logger"
+    ) as logger_mock:
+        setup_instance.ensure_database()
+
+    # expect only existence check, no create attempt
+    cursor_mock.execute.assert_called_once()
+    logger_mock.info.assert_any_call(
+        "Dry run: would create database '%s'. Run without --dry-run to proceed.", mock_config.database
+    )
+
+
+def test_ensure_role_dry_run_skips_creation_and_grants(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=True)
+
+    admin_conn, admin_cursor = _connection_with_cursor()
+    admin_cursor.fetchone.return_value = None
+
+    with mock.patch.object(
+        setup_instance,
+        "_admin_connection",
+        side_effect=[admin_conn],
+    ) as conn_mock, mock.patch("scripts.setup_database.logger") as logger_mock:
+        setup_instance.ensure_role()
+
+    assert conn_mock.call_count == 1
+    admin_cursor.execute.assert_called_once()
+    logger_mock.info.assert_any_call(
+        "Dry run: would create role '%s'. Run without --dry-run to apply.", mock_config.user
+    )
+
+
+def test_register_rollback_skipped_when_dry_run(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=True)
+    setup_instance._register_rollback("noop", lambda: None)
+    assert setup_instance._rollback_actions == []
+
+
+def test_execute_rollbacks_runs_in_reverse_order(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=False)
+
+    calls: list[str] = []
+
+    def first_action() -> None:
+        calls.append("first")
+
+    def second_action() -> None:
+        calls.append("second")
+
+    setup_instance._register_rollback("first", first_action)
+    setup_instance._register_rollback("second", second_action)
+
+    with mock.patch("scripts.setup_database.logger"):
+        setup_instance.execute_rollbacks()
+
+    assert calls == ["second", "first"]
+    assert setup_instance._rollback_actions == []
+
+
+def test_ensure_database_registers_rollback_action(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=False)
+    connection_mock = mock.MagicMock()
+    cursor_mock = mock.MagicMock()
+    cursor_mock.fetchone.return_value = None
+    connection_mock.cursor.return_value = cursor_mock
+
+    with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch.object(
+        setup_instance, "_register_rollback"
+    ) as register_mock, mock.patch.object(setup_instance, "_drop_database") as drop_mock:
+        setup_instance.ensure_database()
+        register_mock.assert_called_once()
+        label, action = register_mock.call_args[0]
+        assert "drop database" in label
+        action()
+        drop_mock.assert_called_once_with(mock_config.database)
+
+
+def test_ensure_role_registers_rollback_actions(mock_config: DatabaseConfig) -> None:
+    setup_instance = DatabaseSetup(mock_config, dry_run=False)
+
+    admin_conn, admin_cursor = _connection_with_cursor()
+    admin_cursor.fetchone.return_value = None
+    privilege_conn, privilege_cursor = _connection_with_cursor()
+
+    with mock.patch.object(
+        setup_instance,
+        "_admin_connection",
+        side_effect=[admin_conn, privilege_conn],
+    ), mock.patch.object(
+        setup_instance, "_register_rollback"
+    ) as register_mock, mock.patch.object(
+        setup_instance, "_drop_role"
+    ) as drop_mock, mock.patch.object(
+        setup_instance, "_revoke_role_privileges"
+    ) as revoke_mock:
+        setup_instance.ensure_role()
+        assert register_mock.call_count == 2
+        drop_label, drop_action = register_mock.call_args_list[0][0]
+        revoke_label, revoke_action = register_mock.call_args_list[1][0]
+
+        assert "drop role" in drop_label
+        assert "revoke privileges" in revoke_label
+
+        drop_action()
+        drop_mock.assert_called_once_with(mock_config.user)
+
+        revoke_action()
+        revoke_mock.assert_called_once()
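+
+# NOTE: taken together, the rollback tests above describe a LIFO undo stack:
+# dry runs register nothing, execute_rollbacks() runs actions in reverse
+# registration order and empties the list, and ensure_database/ensure_role
+# register their own compensating actions (drop database, drop role, revoke
+# privileges) so a partially completed setup can be unwound.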
+
+
+def test_main_triggers_rollbacks_on_failure(mock_config: DatabaseConfig) -> None:
+    args = argparse.Namespace(
+        ensure_database=True,
+        ensure_role=True,
+        ensure_schema=False,
+        initialize_schema=False,
+        run_migrations=False,
+        seed_data=False,
+        migrations_dir=None,
+        db_driver=None,
+        db_host=None,
+        db_port=None,
+        db_name=None,
+        db_user=None,
+        db_password=None,
+        db_schema=None,
+        admin_url=None,
+        admin_user=None,
+        admin_password=None,
+        admin_db=None,
+        dry_run=False,
+        verbose=0,
+    )
+
+    with mock.patch.object(setup_db_module, "parse_args", return_value=args), mock.patch.object(
+        setup_db_module.DatabaseConfig, "from_env", return_value=mock_config
+    ), mock.patch.object(
+        setup_db_module, "DatabaseSetup"
+    ) as setup_cls:
+        setup_instance = mock.MagicMock()
+        setup_instance.dry_run = False
+        setup_instance._rollback_actions = [
+            ("drop role", mock.MagicMock()),
+        ]
+        setup_instance.ensure_database.side_effect = RuntimeError("boom")
+        setup_instance.execute_rollbacks = mock.MagicMock()
+        setup_instance.clear_rollbacks = mock.MagicMock()
+        setup_cls.return_value = setup_instance
+
+        with pytest.raises(RuntimeError):
+            setup_db_module.main()
+
+        setup_instance.execute_rollbacks.assert_called_once()
+        setup_instance.clear_rollbacks.assert_called_once()
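+
+# NOTE: every connection in this module is a MagicMock, so the suite should
+# run without a live PostgreSQL server, e.g. (hypothetical path):
+#
+#     python -m pytest tests/scripts/test_setup_database.py -q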