diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml new file mode 100644 index 0000000..5e4335d --- /dev/null +++ b/docker-compose.postgres.yml @@ -0,0 +1,23 @@ +version: "3.9" + +services: + postgres: + image: postgres:16-alpine + container_name: calminer_postgres_local + restart: unless-stopped + environment: + POSTGRES_DB: calminer_local + POSTGRES_USER: calminer + POSTGRES_PASSWORD: secret + ports: + - "5433:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U calminer -d calminer_local"] + interval: 10s + timeout: 5s + retries: 10 + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: diff --git a/docs/architecture/14_testing_ci.md b/docs/architecture/14_testing_ci.md index c125d3d..91ad3a7 100644 --- a/docs/architecture/14_testing_ci.md +++ b/docs/architecture/14_testing_ci.md @@ -21,7 +21,7 @@ CalMiner uses a combination of unit, integration, and end-to-end tests to ensure ### CI/CD - Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`. -- `test.yml` runs on every push with cached Python dependencies via `actions/cache@v3`. +- `test.yml` runs on every push, provisions a temporary Postgres 16 service, waits for readiness, executes the setup script in dry-run and live modes, installs Playwright browsers, and finally runs the full pytest suite. - `build-and-push.yml` builds the Docker image with `docker/build-push-action@v2`, reusing GitHub Actions cache-backed layers, and pushes to the Gitea registry. - `deploy.yml` connects to the target host (via `appleboy/ssh-action`) to pull the freshly pushed image and restart the container. - Mandatory secrets: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`. @@ -99,10 +99,11 @@ pytest tests/e2e/ --headed `test.yml` encapsulates the steps below: - Check out the repository and set up Python 3.10. -- Restore the pip cache (keyed by `requirements.txt`). 
-- Install project dependencies and Playwright browsers (if needed for E2E). +- Configure the runner's apt proxy (if available), install project dependencies (requirements + test extras), and download Playwright browsers. - Run `pytest` (extend with `--cov` flags when enforcing coverage). +> The pip cache step is temporarily disabled in `test.yml` until the self-hosted cache service is exposed (see `docs/ci-cache-troubleshooting.md`). + `build-and-push.yml` adds: - Registry login using repository secrets. diff --git a/docs/quickstart.md b/docs/quickstart.md index 52d81ce..6a4eee6 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -120,6 +120,38 @@ Typical log output confirms: After a successful run the target database contains all application tables plus `schema_migrations`, and that table records each applied migration file. New installations only record `000_base.sql`; upgraded environments retain historical entries alongside the baseline. +### Local Postgres via Docker Compose + +For local validation without installing Postgres directly, use the provided compose file: + +```powershell +docker compose -f docker-compose.postgres.yml up -d +``` + +The service exposes Postgres 16 on `localhost:5433` with database `calminer_local` and role `calminer`/`secret`. 
When the container is running, set the granular environment variables before invoking the setup script: + +```powershell +$env:DATABASE_DRIVER = 'postgresql' +$env:DATABASE_HOST = '127.0.0.1' +$env:DATABASE_PORT = '5433' +$env:DATABASE_USER = 'calminer' +$env:DATABASE_PASSWORD = 'secret' +$env:DATABASE_NAME = 'calminer_local' +$env:DATABASE_SCHEMA = 'public' + +python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v +python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v +``` + +When testing is complete, shut down the container (and, optionally, remove the persistent data volume) with: + +```powershell +docker compose -f docker-compose.postgres.yml down +docker compose -f docker-compose.postgres.yml down -v # optional: also removes the persistent data volume +``` + +Document successful runs (or issues encountered) in `.github/instructions/DONE.TODO.md` for future reference. + ### Seeding reference data `scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required: @@ -154,7 +186,7 @@ The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container | Variable | Value | Purpose | | --- | --- | --- | | `DATABASE_DRIVER` | `postgresql` | Signals the driver to the setup script | -| `DATABASE_HOST` | `127.0.0.1` | Points to the linked job service | +| `DATABASE_HOST` | `postgres` | Hostname of the Postgres job service container | | `DATABASE_PORT` | `5432` | Default service port | | `DATABASE_NAME` | `calminer_ci` | Target database created by the workflow | | `DATABASE_USER` | `calminer` | Application role used during tests | @@ -166,7 +198,7 @@ The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. 
Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally. -Because the workflow provisions everything inline, no repository or organization secrets need to be configured for basic CI runs. If you later move the setup step to staging or production pipelines, replace these inline values with secrets managed by the CI platform. +Because the workflow provisions everything inline, no repository or organization secrets need to be configured for basic CI runs. If you later move the setup step to staging or production pipelines, replace these inline values with secrets managed by the CI platform. When running on self-hosted runners behind an HTTP proxy or apt cache, ensure Playwright dependencies and OS packages inherit the same proxy settings that the workflow configures prior to installing browsers. ## Database Objects diff --git a/scripts/migrations/20251021_add_currency_and_unit_fields.sql b/scripts/migrations/20251021_add_currency_and_unit_fields.sql deleted file mode 100644 index 6eebf25..0000000 --- a/scripts/migrations/20251021_add_currency_and_unit_fields.sql +++ /dev/null @@ -1,29 +0,0 @@ --- CalMiner Migration: add currency and unit metadata columns --- Date: 2025-10-21 --- Purpose: align persisted schema with API changes introducing currency selection for --- CAPEX/OPEX costs and unit selection for consumption/production records. 
- -BEGIN; - --- CAPEX / OPEX -ALTER TABLE capex - ADD COLUMN IF NOT EXISTS currency_code VARCHAR(3) NOT NULL DEFAULT 'USD'; - -ALTER TABLE opex - ADD COLUMN IF NOT EXISTS currency_code VARCHAR(3) NOT NULL DEFAULT 'USD'; - --- Consumption tracking -ALTER TABLE consumption - ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64); - -ALTER TABLE consumption - ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16); - --- Production output -ALTER TABLE production_output - ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64); - -ALTER TABLE production_output - ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16); - -COMMIT; diff --git a/scripts/migrations/20251022_create_currency_table_and_fks.sql b/scripts/migrations/20251022_create_currency_table_and_fks.sql deleted file mode 100644 index 2c31a40..0000000 --- a/scripts/migrations/20251022_create_currency_table_and_fks.sql +++ /dev/null @@ -1,66 +0,0 @@ --- Migration: create currency referential table and convert capex/opex to FK --- Date: 2025-10-22 - -BEGIN; - --- 1) Create currency table -CREATE TABLE IF NOT EXISTS currency ( - id SERIAL PRIMARY KEY, - code VARCHAR(3) NOT NULL UNIQUE, - name VARCHAR(128) NOT NULL, - symbol VARCHAR(8), - is_active BOOLEAN NOT NULL DEFAULT TRUE -); - --- 2) Seed some common currencies (idempotent) -INSERT INTO currency (code, name, symbol, is_active) -SELECT * FROM (VALUES - ('USD','United States Dollar','$',TRUE), - ('EUR','Euro','€',TRUE), - ('CLP','Chilean Peso','CLP$',TRUE), - ('RMB','Chinese Yuan','¥',TRUE), - ('GBP','British Pound','£',TRUE), - ('CAD','Canadian Dollar','C$',TRUE), - ('AUD','Australian Dollar','A$',TRUE) -) AS v(code,name,symbol,is_active) -ON CONFLICT (code) DO NOTHING; - --- 3) Add currency_id columns to capex and opex with nullable true to allow backfill -ALTER TABLE capex ADD COLUMN IF NOT EXISTS currency_id INTEGER; -ALTER TABLE opex ADD COLUMN IF NOT EXISTS currency_id INTEGER; - --- 4) Backfill currency_id using existing currency_code column where present --- Only do this if the 
currency_code column exists -DO $$ -BEGIN - IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='capex' AND column_name='currency_code') THEN - UPDATE capex SET currency_id = ( - SELECT id FROM currency WHERE code = capex.currency_code LIMIT 1 - ); - END IF; - - IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='opex' AND column_name='currency_code') THEN - UPDATE opex SET currency_id = ( - SELECT id FROM currency WHERE code = opex.currency_code LIMIT 1 - ); - END IF; -END$$; - --- 5) Make currency_id non-nullable and add FK constraint, default to USD where missing -UPDATE currency SET is_active = TRUE WHERE code = 'USD'; - --- Ensure any NULL currency_id uses USD -UPDATE capex SET currency_id = (SELECT id FROM currency WHERE code='USD') WHERE currency_id IS NULL; -UPDATE opex SET currency_id = (SELECT id FROM currency WHERE code='USD') WHERE currency_id IS NULL; - -ALTER TABLE capex ALTER COLUMN currency_id SET NOT NULL; -ALTER TABLE opex ALTER COLUMN currency_id SET NOT NULL; - -ALTER TABLE capex ADD CONSTRAINT fk_capex_currency FOREIGN KEY (currency_id) REFERENCES currency(id); -ALTER TABLE opex ADD CONSTRAINT fk_opex_currency FOREIGN KEY (currency_id) REFERENCES currency(id); - --- 6) Optionally drop old currency_code columns if they exist -ALTER TABLE capex DROP COLUMN IF EXISTS currency_code; -ALTER TABLE opex DROP COLUMN IF EXISTS currency_code; - -COMMIT;