Compare commits
8dedfb8f26 ... main (225 commits)
| SHA1 |
|---|
| cbaff5614a |
| f9feb51d33 |
| eb2687829f |
| ea101d1695 |
| 722f93b41c |
| e2e5e12f46 |
| 4e60168837 |
| dae3b59af9 |
| 839399363e |
| fa8a065138 |
| cd0c0ab416 |
| 854b1ac713 |
| 25fd13ce69 |
| 0fec805db1 |
| 3746062819 |
| 958c165721 |
| 6e835c83eb |
| 75924fca84 |
| ac9ffddbde |
| 4e5a4c645d |
| e9678b6736 |
| e5e346b26a |
| b0e623d68e |
| 30dbc13fae |
| 31b9a1058a |
| bcd993d57c |
| 1262a4a63f |
| fb6816de00 |
| 4d0e1a9989 |
| ed8e05147c |
| 522b1e4105 |
| 4f00bf0d3c |
| 3551b0356d |
| 521a8abc2d |
| 1feae7ff85 |
| 1240b08740 |
| d9fd82b2e3 |
| 6c1570a254 |
| b1a6df9f90 |
| 6d496a599e |
| 1199813da0 |
| acf6f50bbd |
| ad306bd0aa |
| ed4187970c |
| 0fbe9f543e |
| 80825c2c5d |
| 44a3bfc1bf |
| 1f892ebdbb |
| bcdc9e861e |
| 23523f70f1 |
| 8ef6724960 |
| 6e466a3fd2 |
| 9d4c807475 |
| 9cd555e134 |
| e72e297c61 |
| 101d9309fd |
| 9556f9e1f1 |
| 4488cacdc9 |
| e06a6ae068 |
| 3bdae3c54c |
| d89b09fa80 |
| 2214bbe64f |
| 5d6592d657 |
| 3988171b46 |
| 1520724cab |
| 014d96c105 |
| 55fa1f56c1 |
| edf86a5447 |
| 53eacc352e |
| 2bfa498624 |
| 4cfc5d9ffa |
| ce7f4aa776 |
| e0497f58f0 |
| 60410fd71d |
| f55c77312d |
| 63ec4a6953 |
| b0ff79ae9c |
| 0670d05722 |
| 0694d4ec4b |
| ce9c174b53 |
| f68321cd04 |
| 44ff4d0e62 |
| 4364927965 |
| 795a9f99f4 |
| 032e6d2681 |
| 51c0fcec95 |
| 3051f91ab0 |
| e2465188c2 |
| 43b1e53837 |
| 4b33a5dba3 |
| 5f183faa63 |
| 1a7581cda0 |
| b1a0153a8d |
| 609b0d779f |
| eaef99f0ac |
| 3bc124c11f |
| 7058eb4172 |
| e0fa3861a6 |
| ab328b1a0b |
| 24cb3c2f57 |
| 118657491c |
| 0f79864188 |
| 27262bdfa3 |
| 3601c2e422 |
| 53879a411f |
| 2d848c2e09 |
| dad862e48e |
| 400f85c907 |
| 7f5ed6a42d |
| 053da332ac |
| 02da881d3e |
| c39dde3198 |
| faea6777a0 |
| d36611606d |
| 191500aeb7 |
| 61b42b3041 |
| 8bf46b80c8 |
| c69f933684 |
| c6fdc2d923 |
| dc3ebfbba5 |
| 32a96a27c5 |
| 203a5d08f2 |
| c6a0eb2588 |
| d807a50f77 |
| 22ddfb671d |
| 971b4a19ea |
| 5b1278cbea |
| b6511e5273 |
| bcb15bd0e4 |
| 42f8714d71 |
| 1881ebe24f |
| d90aae3d0a |
| 9934d1483d |
| df1c971354 |
| 3a8aef04b0 |
| 45d746d80a |
| f1bc7f06b9 |
| 82e98efb1b |
| f91349dedd |
| efee50fdc7 |
| e254d50c0c |
| 6eef8424b7 |
| c1f4902cf4 |
| 52450bc487 |
| c3449f1986 |
| f863808940 |
| 37646b571a |
| 22f43bed56 |
| 72cf06a31d |
| b796a053d6 |
| 04d7f202b6 |
| 1f58de448c |
| 807204869f |
| ddb23b1da0 |
| 26e231d63f |
| d98d6ebe83 |
| e881be52b5 |
| cc8efa3eab |
| 29a17595da |
| a0431cb630 |
| f1afcaa78b |
| 36da0609ed |
| 26843104ee |
| eb509e3dd2 |
| 51aa2fa71d |
| e1689c3a31 |
| 99d9ea7770 |
| 2136dbdd44 |
| 3da8a50ac4 |
| a772960390 |
| 89a4f663b5 |
| 50446c4248 |
| c5a9a7c96f |
| 723f6a62b8 |
| dcb08ab1b8 |
| a6a5f630cc |
| b56045ca6a |
| 2f07e6fb75 |
| 1f8a595243 |
| 54137b88d7 |
| 7385bdad3e |
| 7d0c8bfc53 |
| a861efeabf |
| 2f5306b793 |
| 573e255769 |
| 8bb5456864 |
| b1d50a56e0 |
| e37488bcf6 |
| ee0a7a5bf5 |
| ef4fb7dcf0 |
| 7f4cd33b65 |
| 41156a87d1 |
| 3fc6a2a9d3 |
| f3da80885f |
| 97b1c0360b |
| e8a86b15e4 |
| 300ecebe23 |
| 70db34d088 |
| 0550928a2f |
| ec56099e2a |
| c71908c8d9 |
| 75f533b87b |
| 5b1322ddbc |
| 713c9feebb |
| e74ec79cc9 |
| f3ce095b71 |
| 4e1658a638 |
| bff75a722e |
| d455320eea |
| 2182f723f7 |
| b3e6546bb9 |
| 5c66bf7899 |
| 9bd5b60d7a |
| 01a702847d |
| 1237902d55 |
| dd3f3141e3 |
| 659b66cc28 |
| 2b1771af86 |
| 9b0c29bade |
| f35607fedc |
| 28fea1f3fe |
| ae19cd67c4 |
| e2f11a1459 |
| f864ad563a |
| 93a2f54f97 |
@@ -10,6 +10,8 @@ venv/
.vscode
.git
.gitignore
.gitea
.github
.DS_Store
dist
build
@@ -17,5 +19,9 @@ build
*.sqlite3
.env
.env.*
.Dockerfile
.dockerignore
coverage/
logs/
backups/
tests/e2e/artifacts/
scripts/__pycache__/
reports/
.env.development (new file, 25 lines)
@@ -0,0 +1,25 @@
# Development Environment Configuration
ENVIRONMENT=development
DEBUG=true
LOG_LEVEL=DEBUG

# Database Configuration
DATABASE_HOST=postgres
DATABASE_PORT=5432
DATABASE_USER=calminer
DATABASE_PASSWORD=calminer_password
DATABASE_NAME=calminer_db
DATABASE_DRIVER=postgresql

# Application Settings
CALMINER_EXPORT_MAX_ROWS=1000
CALMINER_IMPORT_MAX_ROWS=10000
CALMINER_EXPORT_METADATA=true
CALMINER_IMPORT_STAGING_TTL=300

# Admin Seeding (for development)
CALMINER_SEED_ADMIN_EMAIL=admin@calminer.local
CALMINER_SEED_ADMIN_USERNAME=admin
CALMINER_SEED_ADMIN_PASSWORD=ChangeMe123!
CALMINER_SEED_ADMIN_ROLES=admin
CALMINER_SEED_FORCE=false
.env.example (12 lines changed)
@@ -10,5 +10,13 @@ DATABASE_NAME=calminer
# Optional: set a schema (comma-separated for multiple entries)
# DATABASE_SCHEMA=public

# Legacy fallback (still supported, but granular settings are preferred)
# DATABASE_URL=postgresql://<user>:<password>@localhost:5432/calminer
# Default administrative credentials are provided at deployment time through environment variables
# (`CALMINER_SEED_ADMIN_EMAIL`, `CALMINER_SEED_ADMIN_USERNAME`, `CALMINER_SEED_ADMIN_PASSWORD`, `CALMINER_SEED_ADMIN_ROLES`).
# These values are consumed by a shared bootstrap helper on application startup, ensuring mandatory roles and the administrator account exist before any user interaction.
CALMINER_SEED_ADMIN_EMAIL=<email>
CALMINER_SEED_ADMIN_USERNAME=<username>
CALMINER_SEED_ADMIN_PASSWORD=<password>
CALMINER_SEED_ADMIN_ROLES=<roles>
# Operators can request a managed credential reset by setting `CALMINER_SEED_FORCE=true`.
# On the next startup the helper rotates the admin password and reapplies role assignments, so downstream environments must update stored secrets immediately after the reset.
# CALMINER_SEED_FORCE=false
.env.production (new file, 25 lines)
@@ -0,0 +1,25 @@
# Production Environment Configuration
ENVIRONMENT=production
DEBUG=false
LOG_LEVEL=WARNING

# Database Configuration (MUST be set externally - no defaults)
DATABASE_HOST=
DATABASE_PORT=5432
DATABASE_USER=
DATABASE_PASSWORD=
DATABASE_NAME=
DATABASE_DRIVER=postgresql

# Application Settings
CALMINER_EXPORT_MAX_ROWS=100000
CALMINER_IMPORT_MAX_ROWS=100000
CALMINER_EXPORT_METADATA=true
CALMINER_IMPORT_STAGING_TTL=3600

# Admin Seeding (for production - set strong password)
CALMINER_SEED_ADMIN_EMAIL=admin@calminer.com
CALMINER_SEED_ADMIN_USERNAME=admin
CALMINER_SEED_ADMIN_PASSWORD=CHANGE_THIS_VERY_STRONG_PASSWORD
CALMINER_SEED_ADMIN_ROLES=admin
CALMINER_SEED_FORCE=false
.env.staging (new file, 25 lines)
@@ -0,0 +1,25 @@
# Staging Environment Configuration
ENVIRONMENT=staging
DEBUG=false
LOG_LEVEL=INFO

# Database Configuration (override with actual staging values)
DATABASE_HOST=postgres
DATABASE_PORT=5432
DATABASE_USER=calminer_staging
DATABASE_PASSWORD=CHANGE_THIS_STRONG_PASSWORD
DATABASE_NAME=calminer_staging_db
DATABASE_DRIVER=postgresql

# Application Settings
CALMINER_EXPORT_MAX_ROWS=50000
CALMINER_IMPORT_MAX_ROWS=50000
CALMINER_EXPORT_METADATA=true
CALMINER_IMPORT_STAGING_TTL=600

# Admin Seeding (for staging)
CALMINER_SEED_ADMIN_EMAIL=admin@staging.calminer.com
CALMINER_SEED_ADMIN_USERNAME=admin
CALMINER_SEED_ADMIN_PASSWORD=CHANGE_THIS_STRONG_PASSWORD
CALMINER_SEED_ADMIN_ROLES=admin
CALMINER_SEED_FORCE=false
.gitattributes (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
* text=auto

Dockerfile text eol=lf
@@ -1,28 +0,0 @@
name: Build and Push Docker Image
on:
  push:
    branches:
      - main

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Login to Gitea Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ secrets.REGISTRY_URL }}
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: ${{ secrets.REGISTRY_URL }}/${{ secrets.REGISTRY_USERNAME }}/calminer:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
.gitea/workflows/ci-build.yml (new file, 232 lines)
@@ -0,0 +1,232 @@
name: CI - Build

on:
  workflow_call:
  workflow_dispatch:

jobs:
  build:
    outputs:
      allow_push: ${{ steps.meta.outputs.allow_push }}
      ref_name: ${{ steps.meta.outputs.ref_name }}
      event_name: ${{ steps.meta.outputs.event_name }}
      sha: ${{ steps.meta.outputs.sha }}
    runs-on: ubuntu-latest
    env:
      DEFAULT_BRANCH: main
      REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
      REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
      REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
      REGISTRY_CONTAINER_NAME: calminer
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Collect workflow metadata
        id: meta
        shell: bash
        env:
          DEFAULT_BRANCH: ${{ env.DEFAULT_BRANCH }}
        run: |
          git_ref="${GITEA_REF:-${GITHUB_REF:-}}"
          ref_name="${GITEA_REF_NAME:-${GITHUB_REF_NAME:-}}"
          if [ -z "$ref_name" ] && [ -n "$git_ref" ]; then
            ref_name="${git_ref##*/}"
          fi
          event_name="${GITEA_EVENT_NAME:-${GITHUB_EVENT_NAME:-}}"
          sha="${GITEA_SHA:-${GITHUB_SHA:-}}"
          if [ -z "$sha" ]; then
            sha="$(git rev-parse HEAD)"
          fi

          if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ] && [ "$event_name" != "pull_request" ]; then
            echo "allow_push=true" >> "$GITHUB_OUTPUT"
          else
            echo "allow_push=false" >> "$GITHUB_OUTPUT"
          fi

          echo "ref_name=$ref_name" >> "$GITHUB_OUTPUT"
          echo "event_name=$event_name" >> "$GITHUB_OUTPUT"
          echo "sha=$sha" >> "$GITHUB_OUTPUT"

      - name: Validate registry configuration
        shell: bash
        run: |
          set -euo pipefail
          if [ -z "${REGISTRY_URL}" ]; then
            echo "::error::REGISTRY_URL secret not configured. Configure it with your Gitea container registry host." >&2
            exit 1
          fi
          server_url="${GITEA_SERVER_URL:-${GITHUB_SERVER_URL:-}}"
          server_host="${server_url#http://}"
          server_host="${server_host#https://}"
          server_host="${server_host%%/*}"
          server_host="${server_host%%:*}"
          registry_host="${REGISTRY_URL#http://}"
          registry_host="${registry_host#https://}"
          registry_host="${registry_host%%/*}"
          registry_host="${registry_host%%:*}"
          if [ -n "${server_host}" ] && ! printf '%s' "${registry_host}" | grep -qi "${server_host}"; then
            echo "::warning::REGISTRY_URL (${REGISTRY_URL}) does not match current Gitea host (${server_host}). Ensure this registry endpoint is managed by Gitea." >&2
          fi
          registry_repository="${registry_host}/allucanget/${REGISTRY_CONTAINER_NAME}"
          echo "REGISTRY_HOST=${registry_host}" >> "$GITHUB_ENV"
          echo "REGISTRY_REPOSITORY=${registry_repository}" >> "$GITHUB_ENV"

      - name: Set up QEMU and Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to gitea registry
        if: ${{ steps.meta.outputs.allow_push == 'true' }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY_HOST }}
          username: ${{ env.REGISTRY_USERNAME }}
          password: ${{ env.REGISTRY_PASSWORD }}

      - name: Build image
        id: build-image
        env:
          REGISTRY_REPOSITORY: ${{ env.REGISTRY_REPOSITORY }}
          REGISTRY_CONTAINER_NAME: ${{ env.REGISTRY_CONTAINER_NAME }}
          SHA_TAG: ${{ steps.meta.outputs.sha }}
          PUSH_IMAGE: ${{ steps.meta.outputs.allow_push == 'true' && env.REGISTRY_HOST != '' && env.REGISTRY_USERNAME != '' && env.REGISTRY_PASSWORD != '' }}
        run: |
          set -eo pipefail
          LOG_FILE=build.log
          if [ "${PUSH_IMAGE}" = "true" ]; then
            docker buildx build \
              --load \
              --tag "${REGISTRY_REPOSITORY}:latest" \
              --tag "${REGISTRY_REPOSITORY}:${SHA_TAG}" \
              --file Dockerfile \
              . 2>&1 | tee "${LOG_FILE}"
          else
            docker buildx build \
              --load \
              --tag "${REGISTRY_CONTAINER_NAME}:ci" \
              --file Dockerfile \
              . 2>&1 | tee "${LOG_FILE}"
          fi

      - name: Push image
        if: ${{ steps.meta.outputs.allow_push == 'true' }}
        env:
          REGISTRY_REPOSITORY: ${{ env.REGISTRY_REPOSITORY }}
          SHA_TAG: ${{ steps.meta.outputs.sha }}
        run: |
          set -euo pipefail
          if [ -z "${REGISTRY_REPOSITORY}" ]; then
            echo "::error::REGISTRY_REPOSITORY not defined; cannot push image" >&2
            exit 1
          fi
          docker push "${REGISTRY_REPOSITORY}:${SHA_TAG}"
          docker push "${REGISTRY_REPOSITORY}:latest"

      - name: Upload docker build logs
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: docker-build-logs
          path: build.log

  deploy:
    needs: build
    if: needs.build.outputs.allow_push == 'true'
    runs-on: ubuntu-latest
    env:
      REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
      REGISTRY_CONTAINER_NAME: calminer
      KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
      STAGING_KUBE_CONFIG: ${{ secrets.STAGING_KUBE_CONFIG }}
      PROD_KUBE_CONFIG: ${{ secrets.PROD_KUBE_CONFIG }}
      K8S_DEPLOY_ENABLED: ${{ secrets.K8S_DEPLOY_ENABLED }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Resolve registry repository
        run: |
          set -euo pipefail
          if [ -z "${REGISTRY_URL}" ]; then
            echo "::error::REGISTRY_URL secret not configured. Configure it with your Gitea container registry host." >&2
            exit 1
          fi
          registry_host="${REGISTRY_URL#http://}"
          registry_host="${registry_host#https://}"
          registry_host="${registry_host%%/*}"
          registry_host="${registry_host%%:*}"
          registry_repository="${registry_host}/allucanget/${REGISTRY_CONTAINER_NAME}"
          echo "REGISTRY_HOST=${registry_host}" >> "$GITHUB_ENV"
          echo "REGISTRY_REPOSITORY=${registry_repository}" >> "$GITHUB_ENV"

      - name: Report Kubernetes deployment toggle
        run: |
          set -euo pipefail
          enabled="${K8S_DEPLOY_ENABLED:-}"
          if [ "${enabled}" = "true" ]; then
            echo "Kubernetes deployment is enabled for this run."
          else
            echo "::notice::Kubernetes deployment steps are disabled (set secrets.K8S_DEPLOY_ENABLED to 'true' to enable)."
          fi

      - name: Capture commit metadata
        id: commit_meta
        run: |
          set -euo pipefail
          message="$(git log -1 --pretty=%B | tr '\n' ' ')"
          echo "message=$message" >> "$GITHUB_OUTPUT"

      - name: Set up kubectl for staging
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
        uses: azure/k8s-set-context@v3
        with:
          method: kubeconfig
          kubeconfig: ${{ env.STAGING_KUBE_CONFIG }}

      - name: Set up kubectl for production
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
        uses: azure/k8s-set-context@v3
        with:
          method: kubeconfig
          kubeconfig: ${{ env.PROD_KUBE_CONFIG }}

      - name: Deploy to staging
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
        run: |
          kubectl set image deployment/calminer-app calminer=${REGISTRY_REPOSITORY}:latest
          kubectl apply -f k8s/configmap.yaml
          kubectl apply -f k8s/secret.yaml
          kubectl rollout status deployment/calminer-app

      - name: Collect staging deployment logs
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
        run: |
          mkdir -p logs/deployment/staging
          kubectl get pods -o wide > logs/deployment/staging/pods.txt
          kubectl get deployment calminer-app -o yaml > logs/deployment/staging/deployment.yaml
          kubectl logs deployment/calminer-app --all-containers=true --tail=500 > logs/deployment/staging/calminer-app.log

      - name: Deploy to production
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
        run: |
          kubectl set image deployment/calminer-app calminer=${REGISTRY_REPOSITORY}:latest
          kubectl apply -f k8s/configmap.yaml
          kubectl apply -f k8s/secret.yaml
          kubectl rollout status deployment/calminer-app

      - name: Collect production deployment logs
        if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
        run: |
          mkdir -p logs/deployment/production
          kubectl get pods -o wide > logs/deployment/production/pods.txt
          kubectl get deployment calminer-app -o yaml > logs/deployment/production/deployment.yaml
          kubectl logs deployment/calminer-app --all-containers=true --tail=500 > logs/deployment/production/calminer-app.log

      - name: Upload deployment logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: deployment-logs
          path: logs/deployment
          if-no-files-found: ignore
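Both jobs in this workflow normalize `REGISTRY_URL` into a bare host with shell parameter expansions. As a cross-check, here is a minimal Python sketch of the same normalization (illustrative only; the hostname is hypothetical and the workflow itself stays in shell):

```python
def registry_host(url: str) -> str:
    """Mirror the shell expansions: strip the scheme, then path (${var%%/*}), then port (${var%%:*})."""
    host = url.removeprefix("http://").removeprefix("https://")
    host = host.split("/", 1)[0]  # drop any path component
    host = host.split(":", 1)[0]  # drop any port component
    return host


# Hypothetical Gitea instance:
assert registry_host("https://git.example.com:3000/owner") == "git.example.com"
```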
.gitea/workflows/ci-lint.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
name: CI - Lint

on:
  workflow_call:
  workflow_dispatch:

jobs:
  lint:
    runs-on: ubuntu-latest
    env:
      APT_CACHER_NG: http://192.168.88.14:3142
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"

      - name: Configure apt proxy
        run: |
          if [ -n "${APT_CACHER_NG}" ]; then
            echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
          fi

      - name: Install system packages
        run: |
          apt-get update
          apt-get install -y build-essential libpq-dev

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Run Ruff
        run: ruff check .

      - name: Run Black
        run: black --check .

      - name: Run Bandit
        run: bandit -c pyproject.toml -r tests
.gitea/workflows/ci-test.yml (new file, 73 lines)
@@ -0,0 +1,73 @@
name: CI - Test

on:
  workflow_call:
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest
    env:
      APT_CACHER_NG: http://192.168.88.14:3142
      DB_DRIVER: postgresql+psycopg2
      DB_HOST: 192.168.88.35
      DB_NAME: calminer_test
      DB_USER: calminer
      DB_PASSWORD: calminer_password
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: ${{ env.DB_USER }}
          POSTGRES_PASSWORD: ${{ env.DB_PASSWORD }}
          POSTGRES_DB: ${{ env.DB_NAME }}
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"

      - name: Configure apt proxy
        run: |
          if [ -n "${APT_CACHER_NG}" ]; then
            echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
          fi

      - name: Install system packages
        run: |
          apt-get update
          apt-get install -y build-essential libpq-dev

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Run tests
        env:
          DATABASE_DRIVER: ${{ env.DB_DRIVER }}
          DATABASE_HOST: postgres
          DATABASE_PORT: 5432
          DATABASE_USER: ${{ env.DB_USER }}
          DATABASE_PASSWORD: ${{ env.DB_PASSWORD }}
          DATABASE_NAME: ${{ env.DB_NAME }}
        run: |
          pytest --cov=. --cov-report=term-missing --cov-report=xml --cov-fail-under=80 --junitxml=pytest-report.xml

      - name: Upload test artifacts
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: test-artifacts
          path: |
            coverage.xml
            pytest-report.xml
.gitea/workflows/ci.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
name: CI

on:
  push:
    branches:
      - main
      - develop
      - v2
  pull_request:
    branches:
      - main
      - develop
  workflow_dispatch:

jobs:
  lint:
    uses: ./.gitea/workflows/ci-lint.yml
    secrets: inherit

  test:
    needs: lint
    uses: ./.gitea/workflows/ci-test.yml
    secrets: inherit

  build:
    needs:
      - lint
      - test
    uses: ./.gitea/workflows/ci-build.yml
    secrets: inherit
.gitea/workflows/deploy-coolify.yml (new file, 78 lines)
@@ -0,0 +1,78 @@
name: Deploy - Coolify

on:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    env:
      COOLIFY_BASE_URL: ${{ secrets.COOLIFY_BASE_URL }}
      COOLIFY_API_TOKEN: ${{ secrets.COOLIFY_API_TOKEN }}
      COOLIFY_APPLICATION_ID: ${{ secrets.COOLIFY_APPLICATION_ID }}
      COOLIFY_DEPLOY_ENV: ${{ secrets.COOLIFY_DEPLOY_ENV }}
      DOCKER_COMPOSE_PATH: docker-compose.prod.yml
      ENV_FILE_PATH: deploy/.env
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Prepare compose bundle
        run: |
          set -euo pipefail
          mkdir -p deploy
          cp "$DOCKER_COMPOSE_PATH" deploy/docker-compose.yml
          if [ -n "$COOLIFY_DEPLOY_ENV" ]; then
            printf '%s\n' "$COOLIFY_DEPLOY_ENV" > "$ENV_FILE_PATH"
          elif [ ! -f "$ENV_FILE_PATH" ]; then
            echo "::error::COOLIFY_DEPLOY_ENV secret not configured and deploy/.env missing" >&2
            exit 1
          fi

      - name: Validate Coolify secrets
        run: |
          set -euo pipefail
          missing=0
          for var in COOLIFY_BASE_URL COOLIFY_API_TOKEN COOLIFY_APPLICATION_ID; do
            if [ -z "${!var}" ]; then
              echo "::error::Missing required secret: $var"
              missing=1
            fi
          done
          if [ "$missing" -eq 1 ]; then
            exit 1
          fi

      - name: Trigger deployment via Coolify API
        run: |
          set -euo pipefail
          api_url="$COOLIFY_BASE_URL/api/v1/deploy"
          payload=$(jq -n --arg uuid "$COOLIFY_APPLICATION_ID" '{ uuid: $uuid }')
          response=$(curl -sS -w '\n%{http_code}' \
            -X POST "$api_url" \
            -H "Authorization: Bearer $COOLIFY_API_TOKEN" \
            -H "Content-Type: application/json" \
            -d "$payload")
          body=$(echo "$response" | head -n -1)
          status=$(echo "$response" | tail -n1)
          echo "Deploy response status: $status"
          echo "$body"
          printf '%s' "$body" > deploy/coolify-response.json
          if [ "$status" -ge 400 ]; then
            echo "::error::Deployment request failed"
            exit 1
          fi

      - name: Upload deployment bundle
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: coolify-deploy-bundle
          path: |
            deploy/docker-compose.yml
            deploy/.env
            deploy/coolify-response.json
          if-no-files-found: warn
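The deployment trigger above is a single authenticated POST to Coolify's `/api/v1/deploy` endpoint with the application UUID as payload. A minimal Python sketch of the equivalent request (hypothetical base URL, token, and UUID; the workflow itself uses `curl` and `jq`):

```python
import json
import urllib.request

base_url = "https://coolify.example.com"  # stands in for COOLIFY_BASE_URL
payload = json.dumps({"uuid": "app-uuid"}).encode()  # COOLIFY_APPLICATION_ID

request = urllib.request.Request(
    f"{base_url}/api/v1/deploy",
    data=payload,
    headers={
        "Authorization": "Bearer <token>",  # COOLIFY_API_TOKEN
        "Content-Type": "application/json",
    },
)
with urllib.request.urlopen(request) as response:
    # The workflow treats any status >= 400 as a failed deployment request.
    print(response.status, response.read().decode())
```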
@@ -1,29 +0,0 @@
name: Deploy to Server
on:
  push:
    branches:
      - main

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: SSH and deploy
        uses: appleboy/ssh-action@master
        with:
          host: ${{ secrets.SSH_HOST }}
          username: ${{ secrets.SSH_USERNAME }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            docker pull ${{ secrets.REGISTRY_URL }}/${{ secrets.REGISTRY_USERNAME }}/calminer:latest
            docker stop calminer || true
            docker rm calminer || true
            docker run -d --name calminer -p 8000:8000 \
              -e DATABASE_DRIVER=${{ secrets.DATABASE_DRIVER }} \
              -e DATABASE_HOST=${{ secrets.DATABASE_HOST }} \
              -e DATABASE_PORT=${{ secrets.DATABASE_PORT }} \
              -e DATABASE_USER=${{ secrets.DATABASE_USER }} \
              -e DATABASE_PASSWORD=${{ secrets.DATABASE_PASSWORD }} \
              -e DATABASE_NAME=${{ secrets.DATABASE_NAME }} \
              -e DATABASE_SCHEMA=${{ secrets.DATABASE_SCHEMA }} \
              ${{ secrets.REGISTRY_URL }}/${{ secrets.REGISTRY_USERNAME }}/calminer:latest
@@ -1,24 +0,0 @@
name: Run Tests
on: [push]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Cache pip
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run: pip install -r requirements.txt
      - name: Run tests
        run: pytest
.gitignore (vendored, 18 lines changed)
@@ -16,6 +16,10 @@ env/
# environment variables
.env
*.env
.env.*
# except example files
!config/*.env.example

# github instruction files
.github/instructions/
@@ -35,10 +39,22 @@ htmlcov/
# Mypy cache
.mypy_cache/

# Linting cache
.ruff_cache/

# Logs
*.log
logs/

# SQLite database
data/
*.sqlite3
test.db
test*.db
local*.db

# Act runner files
.runner

# Devcontainer files
.devcontainer/devcontainer.json
.devcontainer/docker-compose.yml
.pre-commit-config.yaml (new file, 13 lines)
@@ -0,0 +1,13 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.6.1
    hooks:
      - id: ruff
  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 24.8.0
    hooks:
      - id: black
  - repo: https://github.com/PyCQA/bandit
    rev: 1.7.9
    hooks:
      - id: bandit
Dockerfile (167 lines changed)
@@ -1,38 +1,147 @@
# Multi-stage Dockerfile to keep final image small
FROM python:3.10-slim AS builder
# syntax=docker/dockerfile:1.7

# Install build-time packages and Python dependencies in one layer
WORKDIR /app
COPY requirements.txt /app/requirements.txt
RUN apt-get update \
    && apt-get install -y --no-install-recommends build-essential gcc libpq-dev \
    && python -m pip install --upgrade pip \
    && pip install --no-cache-dir --prefix=/install -r /app/requirements.txt \
    && apt-get purge -y --auto-remove build-essential gcc \
    && rm -rf /var/lib/apt/lists/*
ARG PYTHON_VERSION=3.11-slim
ARG APT_CACHE_URL=http://192.168.88.14:3142

FROM python:${PYTHON_VERSION} AS builder
ARG APT_CACHE_URL

ENV \
    PIP_DISABLE_PIP_VERSION_CHECK=1 \
    PIP_NO_CACHE_DIR=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

FROM python:3.10-slim
WORKDIR /app

# Copy installed packages from builder
COPY --from=builder /install /usr/local
COPY requirements.txt ./requirements.txt

# Production environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    DATABASE_DRIVER=postgresql \
    DATABASE_HOST=localhost \
    DATABASE_PORT=5432 \
    DATABASE_USER=calminer \
    DATABASE_PASSWORD=changeme \
    DATABASE_NAME=calminer \
    DATABASE_SCHEMA=public
RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF'
set -e

python3 <<'PY'
import os, socket, urllib.parse

url = os.environ.get('APT_CACHE_URL', '').strip()
if url:
    parsed = urllib.parse.urlparse(url)
    host = parsed.hostname
    port = parsed.port or (80 if parsed.scheme == 'http' else 443)
    if host:
        sock = socket.socket()
        sock.settimeout(1)
        try:
            sock.connect((host, port))
        except OSError:
            pass
        else:
            with open('/etc/apt/apt.conf.d/01proxy', 'w', encoding='utf-8') as fh:
                fh.write(f"Acquire::http::Proxy \"{url}\";\n")
                fh.write(f"Acquire::https::Proxy \"{url}\";\n")
        finally:
            sock.close()
PY
APT_PROXY_CONFIG=/etc/apt/apt.conf.d/01proxy

apt_update_with_fallback() {
    if ! apt-get update; then
        rm -f "$APT_PROXY_CONFIG"
        apt-get update
    fi
}

apt_install_with_fallback() {
    if ! apt-get install -y --no-install-recommends "$@"; then
        rm -f "$APT_PROXY_CONFIG"
        apt-get update
        apt-get install -y --no-install-recommends "$@"
    fi
}

apt_update_with_fallback
apt_install_with_fallback build-essential gcc libpq-dev
pip install --upgrade pip
pip wheel --no-deps --wheel-dir /wheels -r requirements.txt
apt-get purge -y --auto-remove build-essential gcc
rm -rf /var/lib/apt/lists/*
EOF

FROM python:${PYTHON_VERSION} AS runtime
ARG APT_CACHE_URL

ENV \
    PIP_DISABLE_PIP_VERSION_CHECK=1 \
    PIP_NO_CACHE_DIR=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PATH="/home/appuser/.local/bin:${PATH}"

WORKDIR /app

RUN groupadd --system app && useradd --system --create-home --gid app appuser

RUN /bin/bash <<'EOF'
set -e

python3 <<'PY'
import os, socket, urllib.parse

url = os.environ.get('APT_CACHE_URL', '').strip()
if url:
    parsed = urllib.parse.urlparse(url)
    host = parsed.hostname
    port = parsed.port or (80 if parsed.scheme == 'http' else 443)
    if host:
        sock = socket.socket()
        sock.settimeout(1)
        try:
            sock.connect((host, port))
        except OSError:
            pass
        else:
            with open('/etc/apt/apt.conf.d/01proxy', 'w', encoding='utf-8') as fh:
                fh.write(f"Acquire::http::Proxy \"{url}\";\n")
                fh.write(f"Acquire::https::Proxy \"{url}\";\n")
        finally:
            sock.close()
PY
APT_PROXY_CONFIG=/etc/apt/apt.conf.d/01proxy

apt_update_with_fallback() {
    if ! apt-get update; then
        rm -f "$APT_PROXY_CONFIG"
        apt-get update
    fi
}

apt_install_with_fallback() {
    if ! apt-get install -y --no-install-recommends "$@"; then
        rm -f "$APT_PROXY_CONFIG"
        apt-get update
        apt-get install -y --no-install-recommends "$@"
    fi
}

apt_update_with_fallback
apt_install_with_fallback libpq5
rm -rf /var/lib/apt/lists/*
EOF

COPY --from=builder /wheels /wheels
COPY --from=builder /app/requirements.txt /tmp/requirements.txt

RUN pip install --upgrade pip \
    && pip install --no-cache-dir --find-links=/wheels -r /tmp/requirements.txt \
    && rm -rf /wheels /tmp/requirements.txt

# Copy application code
COPY . /app

# Expose service port
EXPOSE 8000
RUN chown -R appuser:app /app

# Run the FastAPI app with uvicorn
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
USER appuser

EXPOSE 8003

ENTRYPOINT ["uvicorn"]

CMD ["main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]
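Both stages embed the same inline Python probe: it attempts a short TCP connection to the apt cache and only writes `/etc/apt/apt.conf.d/01proxy` when the cache is reachable, while the shell fallbacks remove the proxy file and retry if apt still fails. A standalone sketch of the reachability check (illustrative only, without the proxy-file side effect):

```python
import socket
import urllib.parse


def apt_proxy_reachable(url: str, timeout: float = 1.0) -> bool:
    """Return True if the apt cache URL accepts a TCP connection within the timeout."""
    parsed = urllib.parse.urlparse(url)
    port = parsed.port or (80 if parsed.scheme == "http" else 443)
    if not parsed.hostname:
        return False
    try:
        with socket.create_connection((parsed.hostname, port), timeout=timeout):
            return True
    except OSError:
        return False


print(apt_proxy_reachable("http://192.168.88.14:3142"))
```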
README.md (63 lines changed)
@@ -6,65 +6,8 @@ Focuses on ore mining operations and covering parameters such as capital and ope
The system is designed to help mining companies make informed decisions by simulating various scenarios and analyzing potential outcomes based on stochastic variables.

A range of features is implemented to support these functionalities.

## Features

- **Scenario Management**: Manage multiple mining scenarios with independent parameter sets and outputs.
- **Process Parameters**: Define and persist process inputs via FastAPI endpoints and template-driven forms.
- **Cost Tracking**: Capture capital (`capex`) and operational (`opex`) expenditures per scenario.
- **Consumption Tracking**: Record resource consumption (chemicals, fuel, water, scrap) tied to scenarios.
- **Production Output**: Store production metrics such as tonnage, recovery, and revenue drivers.
- **Equipment Management**: Register scenario-specific equipment inventories.
- **Maintenance Logging**: Log maintenance events against equipment with dates and costs.
- **Reporting Dashboard**: Surface aggregated statistics for simulation outputs with an interactive Chart.js dashboard.
- **Unified UI Shell**: Server-rendered templates extend a shared base layout with a persistent left sidebar linking scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting views.
- **Operations Overview Dashboard**: The root route (`/`) surfaces cross-scenario KPIs, charts, and maintenance reminders with a one-click refresh backed by aggregated loaders.
- **Theming Tokens**: Shared CSS variables in `static/css/main.css` centralize the UI color palette for consistent styling and rapid theme tweaks.
- **Modular Frontend Scripts**: Page-specific interactions now live in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
- **Monte Carlo Simulation (in progress)**: Services and routes are scaffolded for future stochastic analysis.

## Documentation & quickstart

This repository contains detailed developer and architecture documentation in the `docs/` folder.

[Quickstart](docs/quickstart.md) contains the developer quickstart, migrations, testing, and current status.

Key architecture documents: see [architecture](docs/architecture/README.md) for the arc42-based architecture documentation.

For contributors: the `routes/`, `models/` and `services/` folders contain the primary application code. Tests and E2E specs are in `tests/`.

## Run with Docker

The repository ships with a multi-stage `Dockerfile` that produces a slim runtime image.

```powershell
# Build the image locally
docker build -t calminer:latest .

# Run the container (exposes FastAPI on http://localhost:8000)
docker run --rm -p 8000:8000 calminer:latest

# Provide database configuration via granular environment variables
docker run --rm -p 8000:8000 ^
  -e DATABASE_DRIVER="postgresql" ^
  -e DATABASE_HOST="db.host" ^
  -e DATABASE_PORT="5432" ^
  -e DATABASE_USER="calminer" ^
  -e DATABASE_PASSWORD="s3cret" ^
  -e DATABASE_NAME="calminer" ^
  -e DATABASE_SCHEMA="public" ^
  calminer:latest
```

Use `docker compose` or an orchestrator of your choice to co-locate PostgreSQL/Redis alongside the app when needed. The image expects migrations to be applied before startup.

## CI/CD expectations

CalMiner uses Gitea Actions workflows stored in `.gitea/workflows/`:

- `test.yml` runs style/unit/e2e suites on every push with cached Python dependencies.
- `build-and-push.yml` builds the Docker image, reuses cached layers, and pushes to the configured registry.
- `deploy.yml` pulls the pushed image on the target host and restarts the container.

Pipelines assume the following secrets are provisioned in the Gitea instance: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, and `SSH_PRIVATE_KEY`.
- Detailed developer, architecture, and operations guides live in the companion [calminer-docs](../calminer-docs/) repository. Please see the [README](../calminer-docs/README.md) there for instructions.
- For a local run, create a `.env` (see `.env.example`), install requirements, then execute `python -m scripts.init_db` followed by `uvicorn main:app --reload`. The initializer is safe to rerun and seeds demo data automatically.
- To wipe and recreate the schema in development, run `CALMINER_ENV=development python -m scripts.reset_db` before invoking the initializer again.
changelog.md (new file, 124 lines)
@@ -0,0 +1,124 @@
# Changelog

## 2025-11-15

- Fixed dev container setup by reviewing logs, identifying mount errors, implementing fixes, and validating the configuration.

## 2025-11-14

- Completed Coolify deployment automation with workflow and documentation.
- Improved build workflow for registry authentication and tagging.
- Updated production compose and added deployment guidance.
- Added optional Kubernetes deployment toggle.

## 2025-11-13

- Aligned UI styles and ensured accessibility.
- Restructured navigation under project-scenario-calculation hierarchy.
- Reorganized documentation for better structure.
- Refactored navigation sidebar with database-driven data.
- Migrated sidebar rendering to API endpoint.
- Created templates for data import and export.
- Updated relationships for projects, scenarios, and profitability.
- Enhanced scenario frontend templates with project context.
- Scoped profitability calculator to scenario level.
- Added navigation links for opex planner.
- Documented opex planner features.
- Integrated opex calculations with persistence and tests.
- Implemented capex calculations end-to-end.
- Added basic profitability calculations.
- Developed reporting endpoints and templates.
- Integrated charting for visualizations.
- Performed manual testing of capex planner.
- Added unit tests for opex service.
- Added integration tests for opex.

## 2025-11-12

- Fixed reporting dashboard error by correcting route reference.
- Completed navigation validation by adding missing routes and templates for various pages.
- Fixed template rendering error with URL objects.
- Integrated charting for interactive visualizations.
- Verified local application startup and routes.
- Fixed docker-compose configuration.
- Verified deployment pipeline.
- Documented data models.
- Updated performance model to clear warnings.
- Replaced migration system with simpler initializer.
- Removed hardcoded secrets from tests.
- Centralized security scanning config.
- Fixed admin setup with migration.
- Resolved code style warnings.
- Enhanced deploy logging.
- Fixed CI template issue.
- Added SQLite database support.

## 2025-11-11

- Combined old migration files into one initial schema.
- Added base routing to redirect users to login or dashboard.
- Added end-to-end tests for login flow.
- Updated templates to use logo image consistently.
- Centralized currency validation across the app.
- Updated services to show friendly error messages.
- Linked projects to pricing settings.
- Bootstrapped pricing settings at startup.
- Extended pricing support with persisted data.
- Added financial helpers for NPV, IRR, payback.
- Documented financial metrics.
- Implemented Monte Carlo simulation engine.
- Cleaned up reporting contexts.
- Consolidated migration history.
- Added migration script and updated entrypoint.
- Configured test coverage.
- Standardized colors and typography.
- Improved navigation with chevron buttons.
- Established test suites with coverage.
- Configured CI pipelines for tests and security.
- Added deployment automation with Docker and Kubernetes.
- Completed monitoring instrumentation.
- Implemented performance monitoring.
- Added metric storage and endpoints.
- Created middleware for metrics.
- Extended monitoring router.
- Added migration for metrics table.
- Completed concurrent testing.
- Implemented deployment automation.
- Set up Kubernetes manifests.
- Configured CI/CD workflows.
- Documented deployment processes.
- Validated deployment setup.

## 2025-11-10

- Added tests for guard dependencies.
- Added integration tests for authorization.
- Implemented admin bootstrap settings.
- Retired old RBAC plan document.
- Completed authentication and RBAC features.
- Documented import/export field mappings.
- Added import service for CSV/Excel.
- Expanded import workflow with previews and commits.
- Added audit logging for imports/exports.

## 2025-11-09

- Captured implementation status and roadmap.
- Added core database models and migration setup.
- Introduced repository helpers for data operations.
- Added tests for repository behaviors.
- Exposed CRUD APIs for projects and scenarios.
- Connected routers to HTML views.
- Implemented client-side enhancements.
- Added scenario comparison validator.
- Delivered new dashboard experience.
- Extended repositories with utilities.
- Updated detail pages with new visuals.
- Fixed route registration issues.
- Added end-to-end tests for lifecycles.
- Updated template responses.
- Introduced security utilities.
- Added authentication routes.
- Implemented session middleware.
- Delivered seeding utilities.
- Secured routers with RBAC.
config/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Configuration package."""
@@ -1,5 +1,4 @@
from sqlalchemy import create_engine
from sqlalchemy.engine import URL
from sqlalchemy.orm import declarative_base, sessionmaker
import os
from dotenv import load_dotenv
@@ -12,12 +11,21 @@ def _build_database_url() -> str:
    """Construct the SQLAlchemy database URL from granular environment vars.

    Falls back to `DATABASE_URL` for backward compatibility.
    Supports SQLite when CALMINER_USE_SQLITE is set.
    """

    legacy_url = os.environ.get("DATABASE_URL")
    if legacy_url:
    legacy_url = os.environ.get("DATABASE_URL", "")
    if legacy_url and legacy_url.strip() != "":
        return legacy_url

    use_sqlite = os.environ.get("CALMINER_USE_SQLITE", "").lower() in ("true", "1", "yes")
    if use_sqlite:
        # Use SQLite database
        db_path = os.environ.get("DATABASE_PATH", "./data/calminer.db")
        # Ensure the directory exists
        os.makedirs(os.path.dirname(db_path), exist_ok=True)
        return f"sqlite:///{db_path}"

    driver = os.environ.get("DATABASE_DRIVER", "postgresql")
    host = os.environ.get("DATABASE_HOST")
    port = os.environ.get("DATABASE_PORT", "5432")
@@ -42,17 +50,12 @@ def _build_database_url() -> str:
        f"granular variables ({', '.join(missing)})"
    )

    url = URL.create(
        drivername=driver,
        username=user,
        password=password,
        host=host,
        port=int(port) if port else None,
        database=database,
    )

    url = f"{driver}://{user}:{password}@{host}"
    if port:
        url += f":{port}"
    url += f"/{database}"
    if schema:
        url = url.set(query={"options": f"-csearch_path={schema}"})
    url += f"?options=-csearch_path={schema}"

    return str(url)

@@ -60,5 +63,21 @@ def _build_database_url() -> str:
DATABASE_URL = _build_database_url()

engine = create_engine(DATABASE_URL, echo=True, future=True)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Avoid expiring ORM objects on commit so that objects returned from UnitOfWork
# remain usable for the duration of the request cycle without causing
# DetachedInstanceError when accessed after the session commits.
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
    expire_on_commit=False,
)
Base = declarative_base()


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
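To make the replacement concrete, a minimal sketch of the URL the new string-building branch produces (hypothetical connection values; the real code reads them from the `DATABASE_*` environment variables):

```python
# Hypothetical values standing in for the granular DATABASE_* settings.
driver, user, password = "postgresql", "calminer", "s3cret"
host, port, database, schema = "db.host", "5432", "calminer", "public"

url = f"{driver}://{user}:{password}@{host}"
if port:
    url += f":{port}"
url += f"/{database}"
if schema:
    url += f"?options=-csearch_path={schema}"

# postgresql://calminer:s3cret@db.host:5432/calminer?options=-csearch_path=public
print(url)
```

Note one trade-off in this change: unlike `URL.create`, plain f-string interpolation performs no escaping, so credentials containing characters such as `@`, `/`, or `:` would need to be URL-encoded before being placed in the environment.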
config/settings.py (new file, 233 lines)
@@ -0,0 +1,233 @@
from __future__ import annotations

import os
from dataclasses import dataclass
from datetime import timedelta
from functools import lru_cache

from typing import Optional

from services.pricing import PricingMetadata

from services.security import JWTSettings


@dataclass(frozen=True, slots=True)
class AdminBootstrapSettings:
    """Default administrator bootstrap configuration."""

    email: str
    username: str
    password: str
    roles: tuple[str, ...]
    force_reset: bool


@dataclass(frozen=True, slots=True)
class SessionSettings:
    """Cookie and header configuration for session token transport."""

    access_cookie_name: str
    refresh_cookie_name: str
    cookie_secure: bool
    cookie_domain: Optional[str]
    cookie_path: str
    header_name: str
    header_prefix: str
    allow_header_fallback: bool


@dataclass(frozen=True, slots=True)
class Settings:
    """Application configuration sourced from environment variables."""

    jwt_secret_key: str = "change-me"
    jwt_algorithm: str = "HS256"
    jwt_access_token_minutes: int = 15
    jwt_refresh_token_days: int = 7
    session_access_cookie_name: str = "calminer_access_token"
    session_refresh_cookie_name: str = "calminer_refresh_token"
    session_cookie_secure: bool = False
    session_cookie_domain: Optional[str] = None
    session_cookie_path: str = "/"
    session_header_name: str = "Authorization"
    session_header_prefix: str = "Bearer"
    session_allow_header_fallback: bool = True
    admin_email: str = "admin@calminer.local"
    admin_username: str = "admin"
    admin_password: str = "ChangeMe123!"
    admin_roles: tuple[str, ...] = ("admin",)
    admin_force_reset: bool = False
    pricing_default_payable_pct: float = 100.0
    pricing_default_currency: str | None = "USD"
    pricing_moisture_threshold_pct: float = 8.0
    pricing_moisture_penalty_per_pct: float = 0.0

    @classmethod
    def from_environment(cls) -> "Settings":
        """Construct settings from environment variables."""

        return cls(
            jwt_secret_key=os.getenv("CALMINER_JWT_SECRET", "change-me"),
            jwt_algorithm=os.getenv("CALMINER_JWT_ALGORITHM", "HS256"),
            jwt_access_token_minutes=cls._int_from_env(
                "CALMINER_JWT_ACCESS_MINUTES", 15
            ),
            jwt_refresh_token_days=cls._int_from_env(
                "CALMINER_JWT_REFRESH_DAYS", 7
            ),
            session_access_cookie_name=os.getenv(
                "CALMINER_SESSION_ACCESS_COOKIE", "calminer_access_token"
            ),
            session_refresh_cookie_name=os.getenv(
                "CALMINER_SESSION_REFRESH_COOKIE", "calminer_refresh_token"
            ),
            session_cookie_secure=cls._bool_from_env(
                "CALMINER_SESSION_COOKIE_SECURE", False
            ),
            session_cookie_domain=os.getenv("CALMINER_SESSION_COOKIE_DOMAIN"),
            session_cookie_path=os.getenv("CALMINER_SESSION_COOKIE_PATH", "/"),
            session_header_name=os.getenv(
                "CALMINER_SESSION_HEADER_NAME", "Authorization"
            ),
            session_header_prefix=os.getenv(
                "CALMINER_SESSION_HEADER_PREFIX", "Bearer"
            ),
            session_allow_header_fallback=cls._bool_from_env(
                "CALMINER_SESSION_ALLOW_HEADER_FALLBACK", True
            ),
            admin_email=os.getenv(
                "CALMINER_SEED_ADMIN_EMAIL", "admin@calminer.local"
            ),
            admin_username=os.getenv(
                "CALMINER_SEED_ADMIN_USERNAME", "admin"
            ),
            admin_password=os.getenv(
                "CALMINER_SEED_ADMIN_PASSWORD", "ChangeMe123!"
            ),
            admin_roles=cls._parse_admin_roles(
                os.getenv("CALMINER_SEED_ADMIN_ROLES")
            ),
            admin_force_reset=cls._bool_from_env(
                "CALMINER_SEED_FORCE", False
            ),
            pricing_default_payable_pct=cls._float_from_env(
                "CALMINER_PRICING_DEFAULT_PAYABLE_PCT", 100.0
            ),
            pricing_default_currency=cls._optional_str(
                "CALMINER_PRICING_DEFAULT_CURRENCY", "USD"
            ),
            pricing_moisture_threshold_pct=cls._float_from_env(
                "CALMINER_PRICING_MOISTURE_THRESHOLD_PCT", 8.0
            ),
            pricing_moisture_penalty_per_pct=cls._float_from_env(
                "CALMINER_PRICING_MOISTURE_PENALTY_PER_PCT", 0.0
            ),
        )

    @staticmethod
    def _int_from_env(name: str, default: int) -> int:
        raw_value = os.getenv(name)
        if raw_value is None:
            return default
        try:
            return int(raw_value)
        except ValueError:
            return default

    @staticmethod
    def _bool_from_env(name: str, default: bool) -> bool:
        raw_value = os.getenv(name)
        if raw_value is None:
            return default
        lowered = raw_value.strip().lower()
        if lowered in {"1", "true", "yes", "on"}:
            return True
        if lowered in {"0", "false", "no", "off"}:
            return False
        return default

    @staticmethod
    def _parse_admin_roles(raw_value: str | None) -> tuple[str, ...]:
        if not raw_value:
            return ("admin",)
        parts = [segment.strip()
                 for segment in raw_value.split(",") if segment.strip()]
        if "admin" not in parts:
            parts.insert(0, "admin")
        seen: set[str] = set()
        ordered: list[str] = []
        for role_name in parts:
            if role_name not in seen:
                ordered.append(role_name)
                seen.add(role_name)
        return tuple(ordered)

    @staticmethod
    def _float_from_env(name: str, default: float) -> float:
        raw_value = os.getenv(name)
        if raw_value is None:
            return default
        try:
            return float(raw_value)
        except ValueError:
            return default

    @staticmethod
    def _optional_str(name: str, default: str | None = None) -> str | None:
        raw_value = os.getenv(name)
        if raw_value is None or raw_value.strip() == "":
            return default
        return raw_value.strip()

    def jwt_settings(self) -> JWTSettings:
        """Build runtime JWT settings compatible with token helpers."""

        return JWTSettings(
            secret_key=self.jwt_secret_key,
            algorithm=self.jwt_algorithm,
            access_token_ttl=timedelta(minutes=self.jwt_access_token_minutes),
            refresh_token_ttl=timedelta(days=self.jwt_refresh_token_days),
        )

    def session_settings(self) -> SessionSettings:
        """Provide transport configuration for session tokens."""

        return SessionSettings(
            access_cookie_name=self.session_access_cookie_name,
            refresh_cookie_name=self.session_refresh_cookie_name,
            cookie_secure=self.session_cookie_secure,
            cookie_domain=self.session_cookie_domain,
            cookie_path=self.session_cookie_path,
            header_name=self.session_header_name,
            header_prefix=self.session_header_prefix,
            allow_header_fallback=self.session_allow_header_fallback,
        )

    def admin_bootstrap_settings(self) -> AdminBootstrapSettings:
        """Return configured admin bootstrap settings."""

        return AdminBootstrapSettings(
            email=self.admin_email,
            username=self.admin_username,
            password=self.admin_password,
            roles=self.admin_roles,
            force_reset=self.admin_force_reset,
        )

    def pricing_metadata(self) -> PricingMetadata:
        """Build pricing metadata defaults."""

        return PricingMetadata(
            default_payable_pct=self.pricing_default_payable_pct,
            default_currency=self.pricing_default_currency,
            moisture_threshold_pct=self.pricing_moisture_threshold_pct,
            moisture_penalty_per_pct=self.pricing_moisture_penalty_per_pct,
        )


@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Return cached application settings."""

    return Settings.from_environment()
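A short usage sketch (hypothetical role list; assumes the application packages are importable). Because `get_settings()` is memoized with `lru_cache`, environment variables must be set before the first call:

```python
import os

# Hypothetical: grant extra roles; "admin" is prepended automatically if missing.
os.environ["CALMINER_SEED_ADMIN_ROLES"] = "auditor,operator"

from config.settings import get_settings

settings = get_settings()
print(settings.admin_roles)     # ('admin', 'auditor', 'operator')
print(settings.jwt_settings())  # JWTSettings with timedelta-based token TTLs
```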
dependencies.py (new file, 400 lines)
@@ -0,0 +1,400 @@
from __future__ import annotations

from collections.abc import Callable, Generator, Iterable

from fastapi import Depends, HTTPException, Request, status

from config.settings import Settings, get_settings
from models import Project, Role, Scenario, User
from services.authorization import (
    ensure_project_access as ensure_project_access_helper,
    ensure_scenario_access as ensure_scenario_access_helper,
    ensure_scenario_in_project as ensure_scenario_in_project_helper,
)
from services.exceptions import AuthorizationError, EntityNotFoundError
from services.security import JWTSettings
from services.session import (
    AuthSession,
    SessionStrategy,
    SessionTokens,
    build_session_strategy,
    extract_session_tokens,
)
from services.unit_of_work import UnitOfWork
from services.importers import ImportIngestionService
from services.pricing import PricingMetadata
from services.navigation import NavigationService
from services.scenario_evaluation import ScenarioPricingConfig, ScenarioPricingEvaluator
from services.repositories import pricing_settings_to_metadata


def get_unit_of_work() -> Generator[UnitOfWork, None, None]:
    """FastAPI dependency yielding a unit-of-work instance."""
    with UnitOfWork() as uow:
        yield uow


_IMPORT_INGESTION_SERVICE = ImportIngestionService(lambda: UnitOfWork())


def get_import_ingestion_service() -> ImportIngestionService:
    """Provide the singleton import ingestion service."""
    return _IMPORT_INGESTION_SERVICE


def get_application_settings() -> Settings:
    """Provide the cached application settings instance."""
    return get_settings()


def get_pricing_metadata(
    settings: Settings = Depends(get_application_settings),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> PricingMetadata:
    """Return pricing metadata defaults sourced from persisted pricing settings."""
    stored = uow.get_pricing_metadata()
    if stored is not None:
        return stored

    fallback = settings.pricing_metadata()
    seed_result = uow.ensure_default_pricing_settings(metadata=fallback)
    return pricing_settings_to_metadata(seed_result.settings)


def get_navigation_service(
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> NavigationService:
    """Provide the navigation service backed by the unit of work's navigation repository."""
    if not uow.navigation:
        raise RuntimeError("Navigation repository is not initialised")
    return NavigationService(uow.navigation)


def get_pricing_evaluator(
    metadata: PricingMetadata = Depends(get_pricing_metadata),
) -> ScenarioPricingEvaluator:
    """Provide a configured scenario pricing evaluator."""
    return ScenarioPricingEvaluator(ScenarioPricingConfig(metadata=metadata))


def get_jwt_settings() -> JWTSettings:
    """Provide JWT runtime configuration derived from settings."""
    return get_settings().jwt_settings()


def get_session_strategy(
    settings: Settings = Depends(get_application_settings),
) -> SessionStrategy:
    """Return the configured session transport strategy."""
    return build_session_strategy(settings.session_settings())


def get_session_tokens(
    request: Request,
    strategy: SessionStrategy = Depends(get_session_strategy),
) -> SessionTokens:
    """Extract raw session tokens from the incoming request."""
    existing = getattr(request.state, "auth_session", None)
    if isinstance(existing, AuthSession):
        return existing.tokens

    tokens = extract_session_tokens(request, strategy)
    request.state.auth_session = AuthSession(tokens=tokens)
    return tokens


def get_auth_session(
    request: Request,
    tokens: SessionTokens = Depends(get_session_tokens),
) -> AuthSession:
    """Provide authentication session context for the current request."""
    existing = getattr(request.state, "auth_session", None)
    if isinstance(existing, AuthSession):
        return existing

    if tokens.is_empty:
        session = AuthSession.anonymous()
    else:
        session = AuthSession(tokens=tokens)
    request.state.auth_session = session
    return session


def get_current_user(
    session: AuthSession = Depends(get_auth_session),
) -> User | None:
    """Return the current authenticated user if present."""
    return session.user


def require_current_user(
    session: AuthSession = Depends(get_auth_session),
) -> User:
    """Ensure that a request is authenticated and return the user context."""
    if session.user is None or session.tokens.is_empty:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authentication required.",
        )
    return session.user


def require_authenticated_user(
    user: User = Depends(require_current_user),
) -> User:
    """Ensure the current user account is active."""
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is disabled.",
        )
    return user


def require_authenticated_user_html(
    request: Request,
    session: AuthSession = Depends(get_auth_session),
) -> User:
    """HTML-aware authenticated dependency that redirects anonymous sessions."""
    user = session.user
    if user is None or session.tokens.is_empty:
        login_url = str(request.url_for("auth.login_form"))
        raise HTTPException(
            status_code=status.HTTP_303_SEE_OTHER,
            headers={"Location": login_url},
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is disabled.",
        )
    return user


def _user_role_names(user: User) -> set[str]:
    roles: Iterable[Role] = getattr(user, "roles", []) or []
    return {role.name for role in roles}


def require_roles(*roles: str) -> Callable[[User], User]:
    """Dependency factory enforcing membership in one of the given roles."""
    required = tuple(role.strip() for role in roles if role.strip())
    if not required:
        raise ValueError("require_roles requires at least one role name")

    def _dependency(user: User = Depends(require_authenticated_user)) -> User:
        if user.is_superuser:
            return user

        role_names = _user_role_names(user)
        if not any(role in role_names for role in required):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Insufficient permissions for this action.",
            )
        return user

    return _dependency


def require_any_role(*roles: str) -> Callable[[User], User]:
    """Alias of require_roles for readability in some contexts."""
    return require_roles(*roles)


def require_roles_html(*roles: str) -> Callable[[Request], User]:
    """Dependency factory enforcing role membership for HTML responses; redirects anonymous sessions to the login form."""
    required = tuple(role.strip() for role in roles if role.strip())
    if not required:
        raise ValueError("require_roles_html requires at least one role name")

    def _dependency(
        request: Request,
        session: AuthSession = Depends(get_auth_session),
    ) -> User:
        user = session.user
        if user is None:
            login_url = str(request.url_for("auth.login_form"))
            raise HTTPException(
                status_code=status.HTTP_303_SEE_OTHER,
                headers={"Location": login_url},
            )

        if user.is_superuser:
            return user

        role_names = _user_role_names(user)
        if not any(role in role_names for role in required):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Insufficient permissions for this action.",
            )
        return user

    return _dependency


def require_any_role_html(*roles: str) -> Callable[[Request], User]:
    """Alias of require_roles_html for readability."""
    return require_roles_html(*roles)


def require_project_resource(
    *,
    require_manage: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int], Project]:
    """Dependency factory that resolves a project with authorization checks."""

    def _dependency(
        project_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Project:
        try:
            return ensure_project_access_helper(
                uow,
                project_id=project_id,
                user=user,
                require_manage=require_manage,
            )
        except EntityNotFoundError as exc:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=str(exc),
            ) from exc
        except AuthorizationError as exc:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=str(exc),
            ) from exc

    return _dependency


def require_scenario_resource(
    *,
    require_manage: bool = False,
    with_children: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int], Scenario]:
    """Dependency factory that resolves a scenario with authorization checks."""

    def _dependency(
        scenario_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Scenario:
        try:
            return ensure_scenario_access_helper(
                uow,
                scenario_id=scenario_id,
                user=user,
                require_manage=require_manage,
                with_children=with_children,
            )
        except EntityNotFoundError as exc:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=str(exc),
            ) from exc
        except AuthorizationError as exc:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=str(exc),
            ) from exc

    return _dependency


def require_project_scenario_resource(
    *,
    require_manage: bool = False,
    with_children: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int, int], Scenario]:
    """Dependency factory ensuring a scenario belongs to the given project and is accessible."""

    def _dependency(
        project_id: int,
        scenario_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Scenario:
        try:
            return ensure_scenario_in_project_helper(
                uow,
                project_id=project_id,
                scenario_id=scenario_id,
                user=user,
                require_manage=require_manage,
                with_children=with_children,
            )
        except EntityNotFoundError as exc:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=str(exc),
            ) from exc
        except AuthorizationError as exc:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=str(exc),
            ) from exc

    return _dependency


def require_project_resource_html(
    *, require_manage: bool = False
) -> Callable[[int], Project]:
    """HTML-aware project loader that redirects anonymous sessions."""
    return require_project_resource(
        require_manage=require_manage,
        user_dependency=require_authenticated_user_html,
    )


def require_scenario_resource_html(
    *,
    require_manage: bool = False,
    with_children: bool = False,
) -> Callable[[int], Scenario]:
    """HTML-aware scenario loader that redirects anonymous sessions."""
    return require_scenario_resource(
        require_manage=require_manage,
        with_children=with_children,
        user_dependency=require_authenticated_user_html,
    )


def require_project_scenario_resource_html(
    *,
    require_manage: bool = False,
    with_children: bool = False,
) -> Callable[[int, int], Scenario]:
    """HTML-aware project-scenario loader redirecting anonymous sessions."""
    return require_project_scenario_resource(
        require_manage=require_manage,
        with_children=with_children,
        user_dependency=require_authenticated_user_html,
    )
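
Taken together, these factories keep authorization logic out of route bodies. A minimal sketch of how a router might wire them up (the route path, router name, and the "admin" role name are illustrative, not taken from the repository):

```python
# Illustrative wiring of the dependency factories into a FastAPI router.
from fastapi import APIRouter, Depends

from dependencies import require_project_resource, require_roles
from models import Project, User

projects_router = APIRouter(prefix="/api/projects")  # hypothetical router

@projects_router.delete("/{project_id}")
def delete_project(
    # Resolves the project and enforces manage rights, else raises 403/404.
    project: Project = Depends(require_project_resource(require_manage=True)),
    # Superusers pass automatically; everyone else needs the assumed "admin" role.
    user: User = Depends(require_roles("admin")),
) -> dict[str, bool]:
    # ... deletion logic would live here ...
    return {"deleted": True}
```
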
59 docker-compose.override.yml Normal file
@@ -0,0 +1,59 @@
version: "3.8"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        APT_CACHE_URL: ${APT_CACHE_URL:-}
    environment:
      - ENVIRONMENT=development
      - DEBUG=true
      - LOG_LEVEL=DEBUG
      # Override database to use the local postgres service
      - DATABASE_HOST=postgres
      - DATABASE_PORT=5432
      - DATABASE_USER=calminer
      - DATABASE_PASSWORD=calminer_password
      - DATABASE_NAME=calminer_db
      - DATABASE_DRIVER=postgresql
      # Development-specific settings
      - CALMINER_EXPORT_MAX_ROWS=1000
      - CALMINER_IMPORT_MAX_ROWS=10000
    volumes:
      # Mount source code for live reloading (when using --reload)
      - .:/app:ro
      # Override the logs volume to a local path for easier access
      - ./logs:/app/logs
    ports:
      - "8003:8003"
    # Override the command for development with auto-reload
    command:
      [
        "main:app",
        "--host",
        "0.0.0.0",
        "--port",
        "8003",
        "--reload",
        "--workers",
        "1",
      ]
    depends_on:
      - postgres
    restart: unless-stopped

  postgres:
    environment:
      - POSTGRES_USER=calminer
      - POSTGRES_PASSWORD=calminer_password
      - POSTGRES_DB=calminer_db
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  postgres_data:
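
The `DATABASE_*` variables set here are read by the application at startup. A hedged sketch of how they could be folded into a SQLAlchemy connection URL (the helper name is illustrative; only the environment variable names come from these compose files):

```python
# Sketch: assembling DATABASE_* env vars into a SQLAlchemy URL.
# The helper name is illustrative; the env var names are from the compose files.
import os

def database_url() -> str:
    driver = os.getenv("DATABASE_DRIVER", "postgresql")
    user = os.environ["DATABASE_USER"]
    password = os.environ["DATABASE_PASSWORD"]
    host = os.getenv("DATABASE_HOST", "postgres")
    port = os.getenv("DATABASE_PORT", "5432")
    name = os.environ["DATABASE_NAME"]
    return f"{driver}://{user}:{password}@{host}:{port}/{name}"
```
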
73 docker-compose.prod.yml Normal file
@@ -0,0 +1,73 @@
version: "3.8"

services:
  app:
    image: git.allucanget.biz/allucanget/calminer:latest
    environment:
      - ENVIRONMENT=production
      - DEBUG=false
      - LOG_LEVEL=WARNING
      # Database configuration - must be provided externally
      - DATABASE_HOST=${DATABASE_HOST}
      - DATABASE_PORT=${DATABASE_PORT:-5432}
      - DATABASE_USER=${DATABASE_USER}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_NAME=${DATABASE_NAME}
      - DATABASE_DRIVER=postgresql
      # Production-specific settings
      - CALMINER_EXPORT_MAX_ROWS=100000
      - CALMINER_IMPORT_MAX_ROWS=100000
      - CALMINER_EXPORT_METADATA=true
      - CALMINER_IMPORT_STAGING_TTL=3600
    ports:
      - "8003:8003"
    depends_on:
      postgres:
        condition: service_healthy
    restart: unless-stopped
    # Production health checks
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8003/health"]
      interval: 60s
      timeout: 30s
      retries: 5
      start_period: 60s
    # Resource limits for production
    deploy:
      resources:
        limits:
          cpus: "1.0"
          memory: 1G
        reservations:
          cpus: "0.5"
          memory: 512M

  postgres:
    environment:
      - POSTGRES_USER=${DATABASE_USER}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_DB=${DATABASE_NAME}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    # Production postgres health check
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DATABASE_USER} -d ${DATABASE_NAME}"]
      interval: 60s
      timeout: 30s
      retries: 5
      start_period: 60s
    # Resource limits for postgres
    deploy:
      resources:
        limits:
          cpus: "1.0"
          memory: 2G
        reservations:
          cpus: "0.5"
          memory: 1G

volumes:
  postgres_data:
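
The container healthcheck above curls `/health` on port 8003, so the app must expose that route. A minimal sketch of what such an endpoint could look like (the handler body is an assumption; only the path comes from the healthcheck):

```python
# Sketch of the /health endpoint the compose healthchecks probe.
# Only the path is taken from the compose files; the handler body is assumed.
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
def health() -> dict[str, str]:
    # Keep this cheap: the check runs every 60s inside the container.
    return {"status": "ok"}
```
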
62 docker-compose.staging.yml Normal file
@@ -0,0 +1,62 @@
version: "3.8"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        APT_CACHE_URL: ${APT_CACHE_URL:-}
    environment:
      - ENVIRONMENT=staging
      - DEBUG=false
      - LOG_LEVEL=INFO
      # Database configuration - can be overridden by external env
      - DATABASE_HOST=${DATABASE_HOST:-postgres}
      - DATABASE_PORT=${DATABASE_PORT:-5432}
      - DATABASE_USER=${DATABASE_USER:-calminer}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_NAME=${DATABASE_NAME:-calminer_db}
      - DATABASE_DRIVER=postgresql
      # Staging-specific settings
      - CALMINER_EXPORT_MAX_ROWS=50000
      - CALMINER_IMPORT_MAX_ROWS=50000
      - CALMINER_EXPORT_METADATA=true
      - CALMINER_IMPORT_STAGING_TTL=600
    ports:
      - "8003:8003"
    depends_on:
      - postgres
    restart: unless-stopped
    # Health check for staging
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8003/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  postgres:
    environment:
      - POSTGRES_USER=${DATABASE_USER:-calminer}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_DB=${DATABASE_NAME:-calminer_db}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    # Health check for postgres
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "pg_isready -U ${DATABASE_USER:-calminer} -d ${DATABASE_NAME:-calminer_db}",
        ]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

volumes:
  postgres_data:
36 docker-compose.yml Normal file
@@ -0,0 +1,36 @@
services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "8003:8003"
    environment:
      # Environment-specific variables should be set in override files
      - ENVIRONMENT=${ENVIRONMENT:-production}
      - DATABASE_HOST=${DATABASE_HOST:-postgres}
      - DATABASE_PORT=${DATABASE_PORT:-5432}
      - DATABASE_USER=${DATABASE_USER}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_NAME=${DATABASE_NAME}
      - DATABASE_DRIVER=postgresql
    depends_on:
      - postgres
    volumes:
      - ./logs:/app/logs
    restart: unless-stopped

  postgres:
    image: postgres:17
    environment:
      - POSTGRES_USER=${DATABASE_USER}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_DB=${DATABASE_NAME}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  postgres_data:
@@ -1,62 +0,0 @@
---
title: "01 — Introduction and Goals"
description: "System purpose, stakeholders, and high-level goals; project introduction and business/technical goals."
status: draft
---

# 01 — Introduction and Goals

## Purpose

CalMiner aims to provide a comprehensive platform for mining project scenario analysis, enabling stakeholders to make informed decisions based on data-driven insights.

## Stakeholders

- **Project Managers**: Require tools for scenario planning and risk assessment.
- **Data Analysts**: Need access to historical data and simulation results for analysis.
- **Executives**: Seek high-level insights and reporting for strategic decision-making.

## High-Level Goals

1. **Comprehensive Scenario Analysis**: Enable users to create and analyze multiple project scenarios to assess risks and opportunities.
2. **Data-Driven Decision Making**: Provide stakeholders with the insights needed to make informed decisions based on simulation results.
3. **User-Friendly Interface**: Ensure the platform is accessible and easy to use for all stakeholders, regardless of technical expertise.

## System Overview

CalMiner is a FastAPI application that collects mining project inputs, persists scenario-specific records, and surfaces aggregated insights. The platform targets Monte Carlo driven planning, with deterministic CRUD features in place and simulation logic staged for future work.

Frontend components are server-rendered Jinja2 templates, with Chart.js powering the dashboard visualization. The backend leverages SQLAlchemy for ORM mapping to a PostgreSQL database.

### Runtime Flow

1. Users navigate to form templates or API clients to manage scenarios, parameters, and operational data.
2. FastAPI routers validate payloads with Pydantic models, then delegate to SQLAlchemy sessions for persistence.
3. Simulation runs (placeholder `services/simulation.py`) will consume stored parameters to emit iteration results via `/api/simulations/run`.
4. Reporting requests POST simulation outputs to `/api/reporting/summary`; the reporting service calculates aggregates (count, min/max, mean, median, percentiles, standard deviation, variance, and tail-risk metrics at the 95% confidence level), as the sketch after this list shows from the client side.
5. `templates/Dashboard.html` fetches summaries, renders metric cards, and plots distribution charts with Chart.js for stakeholder review.
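
A minimal sketch of step 4 as an API client would see it. The endpoint path and the `{"result": float}` payload shape come from this document; the host, sample values, and exact summary field names are assumptions:

```python
# Sketch: posting simulation outputs to the reporting summary endpoint.
# Path and payload shape are documented; host and values are illustrative.
import httpx

payload = [{"result": 1.2}, {"result": 0.8}, {"result": 1.5}]
response = httpx.post("http://localhost:8003/api/reporting/summary", json=payload)
response.raise_for_status()
summary = response.json()
# Field names assumed from the extended KPIs documented in the Runtime View.
print(summary["mean"], summary["percentile_95"], summary["value_at_risk_95"])
```
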

### Current implementation status (summary)

- Currency normalization, simulation scaffold, and reporting service exist; see [quickstart](../quickstart.md) for full status and migration instructions.

## MVP Features (migrated)

The following MVP features and priorities were defined during initial planning.

### Prioritized Features

1. **Scenario Creation and Management** (High Priority): Allow users to create, edit, and delete scenarios. Rationale: Core functionality for what-if analysis.
1. **Parameter Input and Validation** (High Priority): Input process parameters with validation. Rationale: Ensures data integrity for simulations.
1. **Monte Carlo Simulation Run** (High Priority): Execute simulations and store results. Rationale: Key differentiator for risk analysis.
1. **Basic Reporting** (Medium Priority): Display NPV, IRR, EBITDA from simulation results. Rationale: Essential for decision-making.
1. **Cost Tracking Dashboard** (Medium Priority): Visualize CAPEX and OPEX. Rationale: Helps monitor expenses.
1. **Consumption Monitoring** (Low Priority): Track resource consumption. Rationale: Useful for optimization.
1. **User Authentication** (Medium Priority): Basic login/logout. Rationale: Security for multi-user access.
1. **Export Results** (Low Priority): Export simulation data to CSV/PDF. Rationale: For external analysis.

### Rationale for Prioritization

- High: Core simulation and scenario features first.
- Medium: Reporting and auth for usability.
- Low: Nice-to-haves after basics.
@@ -1,139 +0,0 @@
---
title: "02 — Architecture Constraints"
description: "Document imposed constraints: technical, organizational, regulatory, and environmental constraints that affect architecture decisions."
status: skeleton
---

# 02 — Architecture Constraints

## Technical Constraints

> e.g., choice of FastAPI, PostgreSQL, SQLAlchemy, Chart.js, Jinja2 templates.

## Organizational Constraints

> e.g., team skillsets, development workflows, CI/CD pipelines.

## Regulatory Constraints

> e.g., data privacy laws, industry standards.

## Environmental Constraints

> e.g., deployment environments, cloud provider limitations.

## Performance Constraints

> e.g., response time requirements, scalability needs.

## Security Constraints

> e.g., authentication mechanisms, data encryption standards.

## Budgetary Constraints

> e.g., licensing costs, infrastructure budgets.

## Time Constraints

> e.g., project deadlines, release schedules.

## Interoperability Constraints

> e.g., integration with existing systems, third-party services.

## Maintainability Constraints

> e.g., code modularity, documentation standards.

## Usability Constraints

> e.g., user interface design principles, accessibility requirements.

## Data Constraints

> e.g., data storage formats, data retention policies.

## Deployment Constraints

> e.g., deployment environments, cloud provider limitations.

## Testing Constraints

> e.g., testing frameworks, test coverage requirements.

## Localization Constraints

> e.g., multi-language support, regional settings.

## Versioning Constraints

> e.g., API versioning strategies, backward compatibility.

## Monitoring Constraints

> e.g., logging standards, performance monitoring tools.

## Backup and Recovery Constraints

> e.g., data backup frequency, disaster recovery plans.

## Development Constraints

> e.g., coding languages, frameworks, libraries to be used or avoided.

## Collaboration Constraints

> e.g., communication tools, collaboration platforms.

## Documentation Constraints

> e.g., documentation tools, style guides.

## Training Constraints

> e.g., training programs, skill development initiatives.

## Support Constraints

> e.g., support channels, response time expectations.

## Legal Constraints

> e.g., compliance requirements, intellectual property considerations.

## Ethical Constraints

> e.g., ethical considerations in data usage, user privacy.

## Environmental Impact Constraints

> e.g., energy consumption considerations, sustainability goals.

## Innovation Constraints

> e.g., limitations on adopting new technologies, risk tolerance for experimentation.

## Cultural Constraints

> e.g., organizational culture, team dynamics affecting development practices.

## Stakeholder Constraints

> e.g., stakeholder expectations, communication preferences.

## Change Management Constraints

> e.g., processes for handling changes, version control practices.

## Resource Constraints

> e.g., availability of hardware, software, and human resources.

## Process Constraints

> e.g., development methodologies (Agile, Scrum), project management tools.

## Quality Constraints

> e.g., code quality standards, testing requirements.
@@ -1,38 +0,0 @@
---
title: "03 — Context and Scope"
description: "Describe system context, external actors, and the scope of the architecture."
status: draft
---

# 03 — Context and Scope

## System Context

The CalMiner system operates within the context of mining project management, providing tools for scenario analysis and decision support. It interacts with various data sources, including historical project data and real-time operational metrics.

## External Actors

- **Project Managers**: Utilize the platform for scenario planning and risk assessment.
- **Data Analysts**: Analyze simulation results and derive insights.
- **Executives**: Review high-level reports and dashboards for strategic decision-making.

## Scope of the Architecture

The architecture encompasses the following key areas:

1. **Data Ingestion**: Mechanisms for collecting and processing data from various sources.
2. **Data Storage**: Solutions for storing and managing historical and real-time data.
3. **Simulation Engine**: Core algorithms and models for scenario analysis.
   - 3.1. **Modeling Framework**: Tools for defining and managing simulation models.
   - 3.2. **Parameter Management**: Systems for handling input parameters and configurations.
   - 3.3. **Execution Engine**: Infrastructure for running simulations and processing results.
   - 3.4. **Result Storage**: Systems for storing simulation outputs for analysis and reporting.
4. **Financial Reporting**: Tools for generating reports and visualizations based on simulation outcomes.
5. **Risk Assessment**: Frameworks for identifying and evaluating potential project risks.
6. **Profitability Analysis**: Modules for calculating and analyzing project profitability metrics.
7. **User Interface**: Design and implementation of the user-facing components of the system.
8. **Security and Compliance**: Measures to ensure data security and regulatory compliance.
9. **Scalability and Performance**: Strategies for ensuring the system can handle increasing data volumes and user loads.
10. **Integration Points**: Interfaces for integrating with external systems and services.
11. **Monitoring and Logging**: Systems for tracking system performance and user activity.
12. **Maintenance and Support**: Processes for ongoing system maintenance and user support.
@@ -1,49 +0,0 @@
---
title: "04 — Solution Strategy"
description: "High-level solution strategy describing major approaches, technology choices, and trade-offs."
status: draft
---

# 04 — Solution Strategy

This section outlines the high-level solution strategy for implementing the CalMiner system, focusing on major approaches, technology choices, and trade-offs.

## Client-Server Architecture

- **Backend**: FastAPI serves as the backend framework, providing RESTful APIs for data management, simulation execution, and reporting. It leverages SQLAlchemy for ORM-based database interactions with PostgreSQL.
- **Frontend**: Server-rendered Jinja2 templates deliver dynamic HTML views, enhanced with Chart.js for interactive data visualizations. This approach balances performance and simplicity, avoiding the complexity of a full SPA.
- **Middleware**: Custom middleware handles JSON validation to ensure data integrity before processing requests.

## Technology Choices

- **FastAPI**: Chosen for its high performance, ease of use, and modern features like async support and automatic OpenAPI documentation.
- **PostgreSQL**: Selected for its robustness, scalability, and support for complex queries, making it suitable for handling the diverse data needs of mining project management.
- **SQLAlchemy**: Provides a flexible and powerful ORM layer, facilitating database interactions while maintaining code readability and maintainability.
- **Chart.js**: Utilized for its simplicity and effectiveness in rendering interactive charts, enhancing the user experience on the dashboard.
- **Jinja2**: Enables server-side rendering of HTML templates, allowing for dynamic content generation while keeping the frontend lightweight.
- **Pydantic**: Used for data validation and serialization, ensuring that incoming request payloads conform to expected schemas (see the sketch after this list).
- **Docker**: Employed for containerization, ensuring consistent deployment across different environments and simplifying dependency management.
- **Redis**: Used as an in-memory data store to cache frequently accessed data, improving application performance and reducing database load.
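
A minimal sketch of the Pydantic pattern described above (the `ScenarioCreate` model and its fields are illustrative, not the project's actual schema):

```python
# Illustrative Pydantic request schema; model and field names are assumptions.
from pydantic import BaseModel, Field

class ScenarioCreate(BaseModel):
    name: str = Field(min_length=1, max_length=200)
    description: str | None = None

# FastAPI validates the request body against the model automatically,
# rejecting malformed payloads with a 422 before the handler runs.
```
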

## Trade-offs

- **Server-Rendered vs. SPA**: Opted for server-rendered templates over a single-page application (SPA) to reduce complexity and improve initial load times, at the cost of some interactivity.
- **Synchronous vs. Asynchronous**: While FastAPI supports async operations, the initial implementation focuses on synchronous request handling for simplicity, with plans to introduce async features as needed.
- **Monolithic vs. Microservices**: The initial architecture follows a monolithic approach for ease of development and deployment, with the possibility of refactoring into microservices as the system scales.
- **In-Memory Caching**: Implementing Redis for caching introduces additional infrastructure complexity but significantly enhances performance for read-heavy operations.
- **Database Choice**: PostgreSQL was chosen over NoSQL alternatives due to the structured nature of the data and the need for complex querying capabilities, despite potential scalability challenges.
- **Technology Familiarity**: Selected technologies align with the team's existing skill set to minimize the learning curve and accelerate development, even if some alternatives may offer marginally better performance or features.
- **Extensibility vs. Simplicity**: The architecture is designed to be extensible for future features (e.g., Monte Carlo simulation engine) while maintaining simplicity in the initial implementation to ensure timely delivery of core functionalities.

## Future Considerations

- **Scalability**: As the user base grows, consider transitioning to a microservices architecture and implementing load balancing strategies.
- **Asynchronous Processing**: Introduce asynchronous task queues (e.g., Celery) for long-running simulations to improve responsiveness.
- **Enhanced Frontend**: Explore the possibility of integrating a frontend framework (e.g., React or Vue.js) for more dynamic user interactions in future iterations.
- **Advanced Analytics**: Plan for integrating advanced analytics and machine learning capabilities to enhance simulation accuracy and reporting insights.
- **Security Enhancements**: Implement robust authentication and authorization mechanisms to protect sensitive data and ensure compliance with industry standards.
- **Continuous Integration/Continuous Deployment (CI/CD)**: Establish CI/CD pipelines to automate testing, building, and deployment processes for faster and more reliable releases.
- **Monitoring and Logging**: Integrate monitoring tools (e.g., Prometheus, Grafana) and centralized logging solutions (e.g., ELK stack) to track application performance and troubleshoot issues effectively.
- **User Feedback Loop**: Implement mechanisms for collecting user feedback to inform future development priorities and improve user experience.
- **Documentation**: Maintain comprehensive documentation for both developers and end-users to facilitate onboarding and effective use of the system.
- **Testing Strategy**: Develop a robust testing strategy, including unit, integration, and end-to-end tests, to ensure code quality and reliability as the system evolves.
@@ -1,110 +0,0 @@
# Implementation Plan 2025-10-20

This file contains the implementation plan (MVP features, steps, and estimates).

## Project Setup

1. Connect to PostgreSQL database with schema `calminer`.
1. Create and activate a virtual environment and install dependencies via `requirements.txt`.
1. Define database environment variables in `.env` (e.g., `DATABASE_DRIVER`, `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME`, optional `DATABASE_SCHEMA`).
1. Configure FastAPI entrypoint in `main.py` to include routers.

## Feature: Scenario Management

### Scenario Management — Steps

1. Create `models/scenario.py` for scenario CRUD.
1. Implement API endpoints in `routes/scenarios.py` (GET, POST, PUT, DELETE).
1. Write unit tests in `tests/unit/test_scenario.py`.
1. Build UI component `components/ScenarioForm.html`.

## Feature: Process Parameters

### Parameters — Steps

1. Create `models/parameters.py` for process parameters.
1. Implement Pydantic schemas in `routes/parameters.py`.
1. Add validation middleware in `middleware/validation.py`.
1. Write unit tests in `tests/unit/test_parameter.py`.
1. Build UI component `components/ParameterInput.html`.

## Feature: Stochastic Variables

### Stochastic Variables — Steps

1. Create `models/distribution.py` for variable distributions.
1. Implement API routes in `routes/distributions.py`.
1. Write Pydantic schemas and validations.
1. Write unit tests in `tests/unit/test_distribution.py`.
1. Build UI component `components/DistributionEditor.html`.

## Feature: Cost Tracking

### Cost Tracking — Steps

1. Create `models/capex.py` and `models/opex.py`.
1. Implement API routes in `routes/costs.py`.
1. Write Pydantic schemas for CAPEX/OPEX.
1. Write unit tests in `tests/unit/test_costs.py`.
1. Build UI component `components/CostForm.html`.

## Feature: Consumption Tracking

### Consumption Tracking — Steps

1. Create models for consumption: `chemical_consumption.py`, `fuel_consumption.py`, `water_consumption.py`, `scrap_consumption.py`.
1. Implement API routes in `routes/consumption.py`.
1. Write Pydantic schemas for consumption data.
1. Write unit tests in `tests/unit/test_consumption.py`.
1. Build UI component `components/ConsumptionDashboard.html`.

## Feature: Production Output

### Production Output — Steps

1. Create `models/production_output.py`.
1. Implement API routes in `routes/production.py`.
1. Write Pydantic schemas for production output.
1. Write unit tests in `tests/unit/test_production.py`.
1. Build UI component `components/ProductionChart.html`.

## Feature: Equipment Management

### Equipment Management — Steps

1. Create `models/equipment.py` for equipment data.
1. Implement API routes in `routes/equipment.py`.
1. Write Pydantic schemas for equipment.
1. Write unit tests in `tests/unit/test_equipment.py`.
1. Build UI component `components/EquipmentList.html`.

## Feature: Maintenance Logging

### Maintenance Logging — Steps

1. Create `models/maintenance.py` for maintenance events.
1. Implement API routes in `routes/maintenance.py`.
1. Write Pydantic schemas for maintenance logs.
1. Write unit tests in `tests/unit/test_maintenance.py`.
1. Build UI component `components/MaintenanceLog.html`.

## Feature: Monte Carlo Simulation Engine

### Monte Carlo Engine — Steps

1. Implement Monte Carlo logic in `services/simulation.py`.
1. Persist results in `models/simulation_result.py`.
1. Expose endpoint in `routes/simulations.py`.
1. Write integration tests in `tests/unit/test_simulation.py`.
1. Build UI component `components/SimulationRunner.html`.

## Feature: Reporting / Dashboard

### Reporting / Dashboard — Steps

1. Implement report calculations in `services/reporting.py`.
1. Add detailed and summary endpoints in `routes/reporting.py`.
1. Write unit tests in `tests/unit/test_reporting.py`.
1. Enhance UI in `components/Dashboard.html` with charts.

See [UI and Style](../13_ui_and_style.md) for the UI template audit, layout guidance, and next steps.
@@ -1,57 +0,0 @@
---
title: "05 — Building Block View"
description: "Explain the static structure: modules, components, services and their relationships."
status: draft
---

# 05 — Building Block View

## Architecture overview

This overview complements [architecture](README.md) with a high-level map of CalMiner's module layout and request flow.

Refer to the detailed architecture chapters in `docs/architecture/`:

- Module map & components: [Building Block View](05_building_block_view.md)
- Request flow & runtime interactions: [Runtime View](06_runtime_view.md)
- Simulation roadmap & strategy: [Solution Strategy](04_solution_strategy.md)

## System Components

### Backend

- **FastAPI application** (`main.py`): entry point that configures routers, middleware, and startup/shutdown events.
- **Routers** (`routes/`): modular route handlers for scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting. Each router defines RESTful endpoints, request/response schemas, and orchestrates service calls.
  - Routers leverage a shared dependency module (`routes/dependencies.get_db`) for SQLAlchemy session management.
- **Models** (`models/`): SQLAlchemy ORM models representing database tables and relationships, encapsulating domain entities like Scenario, CapEx, OpEx, Consumption, ProductionOutput, Equipment, Maintenance, and SimulationResult.
- **Services** (`services/`): business logic layer that processes data, performs calculations, and interacts with models. Key services include reporting calculations and Monte Carlo simulation scaffolding.
- **Database** (`config/database.py`): sets up the SQLAlchemy engine and session management for PostgreSQL interactions.

### Frontend

- **Templates** (`templates/`): Jinja2 templates for server-rendered HTML views, extending a shared base layout with a persistent sidebar for navigation.
- **Static Assets** (`static/`): CSS and JavaScript files for styling and interactivity. Shared CSS variables in `static/css/main.css` define the color palette, while page-specific JS modules in `static/js/` handle dynamic behaviors.
- **Reusable partials** (`templates/partials/components.html`): macro library that standardises select inputs, feedback/empty states, and table wrappers so pages remain consistent while keeping DOM hooks stable for existing JavaScript modules.

### Middleware & Utilities

- **Middleware** (`middleware/validation.py`): applies JSON validation before requests reach routers.
- **Testing** (`tests/unit/`): pytest suite covering route and service behavior, including UI rendering checks and negative-path router validation tests to ensure consistent HTTP error semantics. Playwright end-to-end coverage is planned for core smoke flows (dashboard load, scenario inputs, reporting) and will attach in CI once scaffolding is completed.

## Module Map (code)

- `scenario.py`: central scenario entity with relationships to cost, consumption, production, equipment, maintenance, and simulation results.
- `capex.py`, `opex.py`: financial expenditures tied to scenarios.
- `consumption.py`, `production_output.py`: operational data tables.
- `equipment.py`, `maintenance.py`: asset management models.
- `simulation_result.py`: stores Monte Carlo iteration outputs.

## Service Layer

- `reporting.py`: computes aggregates (count, min/max, mean, median, percentiles, standard deviation, variance, tail-risk metrics) from simulation results.
- `simulation.py`: scaffolds Monte Carlo simulation logic (currently in-memory; persistence planned).
- `currency.py`: handles currency normalization for cost tables.
- `utils.py`: shared helper functions (e.g., statistical calculations).
- `validation.py`: JSON schema validation middleware.
- `database.py`: SQLAlchemy engine and session setup.
- `dependencies.py`: FastAPI dependency injection for DB sessions.
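
The shared `routes/dependencies.get_db` hook referenced above follows FastAPI's standard session-per-request pattern. A hedged sketch of that shape (the `SessionLocal` factory name is an assumption; only the module path and purpose come from this chapter):

```python
# Sketch of routes/dependencies.get_db; the SessionLocal name is assumed.
from collections.abc import Generator

from sqlalchemy.orm import Session

from config.database import SessionLocal  # assumed session factory name

def get_db() -> Generator[Session, None, None]:
    db = SessionLocal()
    try:
        yield db  # each request gets its own session
    finally:
        db.close()  # always release the connection back to the pool
```
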
@@ -1,288 +0,0 @@
---
title: "06 — Runtime View"
description: "Describe runtime aspects: request flows, lifecycle of key interactions, and runtime components."
status: draft
---

# 06 — Runtime View

## Overview

The runtime view focuses on the dynamic behavior of the CalMiner application during execution. It illustrates how various components interact to fulfill user requests, process data, and generate outputs. Key runtime scenarios include scenario management, parameter input handling, cost tracking, consumption tracking, production output recording, equipment management, maintenance logging, Monte Carlo simulations, and reporting.

## Request Flow

1. **User Interaction**: A user interacts with the web application through the UI, triggering actions such as creating a scenario, inputting parameters, or generating reports.
2. **API Request**: The frontend sends HTTP requests (GET, POST, PUT, DELETE) to the appropriate API endpoints defined in the `routes/` directory.
3. **Routing**: The FastAPI framework routes the incoming requests to the corresponding route handlers.
4. **Service Layer**: Route handlers invoke services from the `services/` directory to process the business logic.
5. **Database Interaction**: Services interact with the database via ORM models defined in the `models/` directory to perform CRUD operations.
6. **Response Generation**: After processing, services return data to the route handlers, which format the response (JSON or HTML) and send it back to the frontend.
7. **UI Update**: The frontend updates the UI based on the response, rendering new data or updating existing views.
8. **Reporting Pipeline**: For reporting, data is aggregated from various sources, processed to generate statistics, and presented in the dashboard using Chart.js.
9. **Monte Carlo Simulations**: Stochastic simulations are executed in the backend, generating probabilistic outcomes that are stored temporarily and used for risk analysis in reports.
10. **Error Handling**: Throughout the process, error handling mechanisms ensure that exceptions are caught and appropriate responses are sent back to the user.

Request flow diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant Service
    participant Database

    User->>Frontend: Interact with UI
    Frontend->>API: Send HTTP Request
    API->>Service: Route to Handler
    Service->>Database: Perform CRUD Operation
    Database-->>Service: Return Data
    Service-->>API: Return Processed Data
    API-->>Frontend: Send Response
    Frontend-->>User: Update UI

    participant Reporting

    Service->>Reporting: Aggregate Data
    Reporting-->>Service: Return Report Data
    Service-->>API: Return Report Response
    API-->>Frontend: Send Report Data
    Frontend-->>User: Render Report

    participant Simulation
    Service->>Simulation: Execute Monte Carlo Simulation
    Simulation-->>Service: Return Simulation Results

    Service-->>API: Return Simulation Data
    API-->>Frontend: Send Simulation Data
    Frontend-->>User: Display Simulation Results
```

## Key Runtime Scenarios

### Scenario Management

1. User accesses the scenario list via the UI.
2. The frontend sends a GET request to `/api/scenarios`.
3. The `ScenarioService` retrieves scenarios from the database.
4. The response is rendered in the UI.
5. For scenario creation, the user submits a form, triggering a POST request to `/api/scenarios`, which the `ScenarioService` processes to create a new scenario in the database.
6. The UI updates to reflect the new scenario.

Scenario management diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant ScenarioService
    participant Database

    User->>Frontend: Access Scenario List
    Frontend->>API: GET /api/scenarios
    API->>ScenarioService: Route to Handler
    ScenarioService->>Database: Retrieve Scenarios
    Database-->>ScenarioService: Return Scenarios
    ScenarioService-->>API: Return Scenario Data
    API-->>Frontend: Send Response
    Frontend-->>User: Render Scenario List

    User->>Frontend: Submit New Scenario Form
    Frontend->>API: POST /api/scenarios
    API->>ScenarioService: Route to Handler
    ScenarioService->>Database: Create New Scenario
    Database-->>ScenarioService: Confirm Creation
    ScenarioService-->>API: Return New Scenario Data
    API-->>Frontend: Send Response
    Frontend-->>User: Update UI with New Scenario
```

### Process Parameter Input

1. User navigates to the parameter input form.
2. The frontend fetches existing parameters via a GET request to `/api/parameters`.
3. The `ParameterService` retrieves parameters from the database.
4. The response is rendered in the UI.
5. For parameter updates, the user submits a form, triggering a PUT request to `/api/parameters/:id`, which the `ParameterService` processes to update the parameter in the database.
6. The UI updates to reflect the changes.

Parameter input diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant ParameterService
    participant Database

    User->>Frontend: Navigate to Parameter Input Form
    Frontend->>API: GET /api/parameters
    API->>ParameterService: Route to Handler
    ParameterService->>Database: Retrieve Parameters
    Database-->>ParameterService: Return Parameters
    ParameterService-->>API: Return Parameter Data
    API-->>Frontend: Send Response
    Frontend-->>User: Render Parameter Form

    User->>Frontend: Submit Parameter Update Form
    Frontend->>API: PUT /api/parameters/:id
    API->>ParameterService: Route to Handler
    ParameterService->>Database: Update Parameter
    Database-->>ParameterService: Confirm Update
    ParameterService-->>API: Return Updated Parameter Data
    API-->>Frontend: Send Response
    Frontend-->>User: Update UI with Updated Parameter
```

### Cost Tracking

1. User accesses the cost tracking view.
2. The frontend sends a GET request to `/api/costs` to fetch existing cost records.
3. The `CostService` retrieves cost data from the database.
4. The response is rendered in the UI.
5. For cost updates, the user submits a form, triggering a PUT request to `/api/costs/:id`, which the `CostService` processes to update the cost record in the database.
6. The UI updates to reflect the changes.

Cost tracking diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant CostService
    participant Database

    User->>Frontend: Access Cost Tracking View
    Frontend->>API: GET /api/costs
    API->>CostService: Route to Handler
    CostService->>Database: Retrieve Cost Records
    Database-->>CostService: Return Cost Data
    CostService-->>API: Return Cost Data
    API-->>Frontend: Send Response
    Frontend-->>User: Render Cost Tracking View

    User->>Frontend: Submit Cost Update Form
    Frontend->>API: PUT /api/costs/:id
    API->>CostService: Route to Handler
    CostService->>Database: Update Cost Record
    Database-->>CostService: Confirm Update
    CostService-->>API: Return Updated Cost Data
    API-->>Frontend: Send Response
    Frontend-->>User: Update UI with Updated Cost Data
```

## Reporting Pipeline and UI Integration

1. **Data Sources**

   - Scenario-linked calculations (costs, consumption, production) produce raw figures stored in dedicated tables (`capex`, `opex`, `consumption`, `production_output`).
   - Monte Carlo simulations (currently transient) generate arrays of `{ "result": float }` tuples that the dashboard or downstream tooling passes directly to reporting endpoints.

2. **API Contract**

   - `POST /api/reporting/summary` accepts a JSON array of result objects and validates shape through `_validate_payload` in `routes/reporting.py`.
   - On success it returns a structured payload (`ReportSummary`) containing count, mean, median, min/max, standard deviation, and percentile values, all as floats.

3. **Service Layer**

   - `services/reporting.generate_report` converts the sanitized payload into descriptive statistics using Python's standard library (`statistics` module) to avoid external dependencies.
   - The service remains stateless; no database read/write occurs, which keeps summary calculations deterministic and idempotent.
   - Extended KPIs (surfaced in the API and dashboard; the sketch after the diagram below shows how they can be computed):
     - `variance`: population variance computed as the square of the population standard deviation.
     - `percentile_5` and `percentile_95`: lower and upper tail interpolated percentiles for sensitivity bounds.
     - `value_at_risk_95`: 5th percentile threshold representing the minimum outcome within a 95% confidence band.
     - `expected_shortfall_95`: mean of all outcomes at or below the `value_at_risk_95`, highlighting tail exposure.

4. **UI Consumption**

   - `templates/Dashboard.html` posts the user-provided dataset to the summary endpoint, renders metric cards for each field, and charts the distribution using Chart.js.
   - `SUMMARY_FIELDS` now includes variance, 5th/10th/90th/95th percentiles, and tail-risk metrics (VaR/Expected Shortfall at 95%); tooltip annotations surface the tail metrics alongside the percentile line chart.
   - Error handling surfaces HTTP failures inline so users can address malformed JSON or backend availability issues without leaving the page.

Reporting pipeline diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant ReportingService

    User->>Frontend: Input Data for Reporting
    Frontend->>API: POST /api/reporting/summary
    API->>ReportingService: Route to Handler
    ReportingService->>ReportingService: Validate Payload
    ReportingService->>ReportingService: Compute Statistics
    ReportingService-->>API: Return Report Summary
    API-->>Frontend: Send Report Summary
    Frontend-->>User: Render Report Metrics and Charts
```
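
A hedged sketch of the extended KPI arithmetic, using only the standard library as the Service Layer notes state (the `summarize` function name is illustrative; the definitions follow the list above):

```python
# Sketch of the extended KPI calculations; the function name is illustrative,
# but each definition follows the Service Layer notes above.
import statistics

def summarize(results: list[float]) -> dict[str, float]:
    ordered = sorted(results)  # needs at least two data points
    # statistics.quantiles with n=100 yields 99 interpolated percentile cuts.
    cuts = statistics.quantiles(ordered, n=100)
    var_95 = cuts[4]  # 5th percentile = value at risk at 95% confidence
    tail = [value for value in ordered if value <= var_95]
    return {
        "variance": statistics.pstdev(ordered) ** 2,  # population variance
        "percentile_5": cuts[4],
        "percentile_95": cuts[94],
        "value_at_risk_95": var_95,
        # Mean of outcomes at or below VaR, i.e. the expected shortfall.
        "expected_shortfall_95": statistics.mean(tail),
    }
```
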

## Monte Carlo Simulation Execution

1. User initiates a Monte Carlo simulation via the UI.
2. The frontend sends a POST request to `/api/simulations/run` with simulation parameters.
3. The `SimulationService` executes the Monte Carlo logic, generating stochastic results.
4. The results are temporarily stored and returned to the frontend.
5. The UI displays the simulation results and allows users to trigger reporting based on these outcomes.
6. The reporting pipeline processes the simulation results as described above.
7. Error handling ensures that any issues during simulation execution are communicated back to the user.

Monte Carlo simulation diagram:

```mermaid
sequenceDiagram
    participant User
    participant Frontend
    participant API
    participant SimulationService

    User->>Frontend: Input Simulation Parameters
    Frontend->>API: POST /api/simulations/run
    API->>SimulationService: Route to Handler
    SimulationService->>SimulationService: Execute Monte Carlo Logic
    SimulationService-->>API: Return Simulation Results
    API-->>Frontend: Send Simulation Results
    Frontend-->>User: Render Simulation Results
```
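
Since the simulation logic is still a scaffold, the following is only a sketch of the kind of loop `services/simulation.py` might grow into. The normal distribution and parameter names are assumptions; the `{"result": float}` output shape comes from the data-sources note above:

```python
# Hypothetical Monte Carlo loop; distribution choice and parameters are assumptions.
# Only the {"result": float} output shape is taken from this document.
import random

def run_simulation(mean: float, stddev: float, iterations: int = 1000) -> list[dict[str, float]]:
    rng = random.Random(42)  # seeded so the sketch is reproducible
    return [{"result": rng.gauss(mean, stddev)} for _ in range(iterations)]

# The resulting list can be POSTed directly to /api/reporting/summary.
```
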
|
||||
## Error Handling
|
||||
|
||||
Throughout the runtime processes, error handling mechanisms are implemented to catch exceptions and provide meaningful feedback to users. Common error scenarios include:
|
||||
|
||||
- Invalid input data
|
||||
- Database connection issues
|
||||
- Simulation execution errors
|
||||
- Reporting calculation failures
|
||||
- API endpoint unavailability
|
||||
- Timeouts during long-running operations
|
||||
- Unauthorized access attempts
|
||||
- Data validation failures
|
||||
- Resource not found errors
|
||||
|
||||
Error handling diagram:
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant User
|
||||
participant Frontend
|
||||
participant API
|
||||
participant Service
|
||||
|
||||
User->>Frontend: Perform Action
|
||||
Frontend->>API: Send Request
|
||||
API->>Service: Route to Handler
|
||||
Service->>Service: Process Request
|
||||
alt Success
|
||||
Service-->>API: Return Data
|
||||
API-->>Frontend: Send Response
|
||||
Frontend-->>User: Update UI
|
||||
else Error
|
||||
Service-->>API: Return Error
|
||||
API-->>Frontend: Send Error Response
|
||||
Frontend-->>User: Display Error Message
|
||||
end
|
||||
```
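
The `else Error` branch is typically centralized. A sketch of one way to do this in FastAPI, assuming a uniform error envelope (illustrative, not the project's actual handler):

```python
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse

app = FastAPI()


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
    # Return a uniform error envelope the frontend can render inline.
    return JSONResponse(
        status_code=exc.status_code,
        content={"error": exc.detail, "path": request.url.path},
    )
```
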

@@ -1,88 +0,0 @@
---
title: "07 — Deployment View"
description: "Describe deployment topology, infrastructure components, and environments (dev/stage/prod)."
status: draft
---

<!-- markdownlint-disable-next-line MD025 -->

# 07 — Deployment View

## Deployment Topology

The CalMiner application is deployed using a multi-tier architecture consisting of the following layers:

1. **Client Layer**: Web browsers that interact with the application through a user interface rendered by Jinja2 templates and enhanced with JavaScript (Chart.js for dashboards).
2. **Web Application Layer**: Hosts the FastAPI application, which handles API requests and business logic and serves HTML templates. It communicates with the database layer for data persistence.
3. **Database Layer**: A PostgreSQL database that stores all application data, including scenarios, parameters, costs, consumption, production outputs, equipment, maintenance logs, and simulation results.
4. **Caching Layer**: Redis, used to cache frequently accessed data and improve application performance.

## Infrastructure Components

The infrastructure components for the application include:

- **Web Server**: Hosts the FastAPI application and serves API endpoints.
- **Database Server**: PostgreSQL database for persisting application data.
- **Static File Server**: Serves static assets such as CSS, JavaScript, and image files.
- **Reverse Proxy (optional)**: An Nginx or Apache server can be used as a reverse proxy.
- **Containerization**: Docker images are generated via the repository `Dockerfile`, using a multi-stage build to keep the final runtime minimal.
- **CI/CD Pipeline**: Automated pipelines (Gitea Actions) run tests, build/push Docker images, and trigger deployments.
- **Cloud Infrastructure (optional)**: The application can be deployed on cloud platforms.

## Environments

The application can be deployed in multiple environments to support development, testing, and production:

### Development Environment

The development environment is set up for local development and testing. It includes:

- Local PostgreSQL instance
- FastAPI server running in debug mode

### Testing Environment

The testing environment is set up for automated testing and quality assurance. It includes:

- Staging PostgreSQL instance
- FastAPI server running in testing mode
- Automated test suite (e.g., pytest) for running unit and integration tests

### Production Environment

The production environment is set up for serving live traffic and includes:

- Production PostgreSQL instance
- FastAPI server running in production mode
- Load balancer (e.g., Nginx) for distributing incoming requests
- Monitoring and logging tools for tracking application performance

## Containerized Deployment Flow

The Docker-based deployment path aligns with the solution strategy documented in [04 — Solution Strategy](04_solution_strategy.md) and the CI practices captured in [14 — Testing & CI](14_testing_ci.md).

### Image Build

- The multi-stage `Dockerfile` installs dependencies in a builder layer (including system compilers and Python packages) and copies only the required runtime artifacts to the final image.
- Build arguments are minimal; database configuration is supplied at runtime via granular variables (`DATABASE_DRIVER`, `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME`, optional `DATABASE_SCHEMA`). Secrets and configuration should be passed via environment variables or an orchestrator.
- The resulting image exposes port `8000` and starts `uvicorn main:app` (see [README.md](../../README.md)).
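
A sketch of how the granular variables might be assembled into a connection URL at startup, including the legacy `DATABASE_URL` fallback mentioned elsewhere in these docs (the helper below is illustrative; the real logic lives in the project's configuration module):

```python
import os


def database_url() -> str:
    """Illustrative assembly of the connection URL from granular env vars."""
    driver = os.getenv("DATABASE_DRIVER")
    if not driver:
        # Legacy fallback when the granular keys are omitted.
        return os.environ["DATABASE_URL"]
    auth = f"{os.getenv('DATABASE_USER')}:{os.getenv('DATABASE_PASSWORD')}"
    host = f"{os.getenv('DATABASE_HOST')}:{os.getenv('DATABASE_PORT', '5432')}"
    return f"{driver}://{auth}@{host}/{os.getenv('DATABASE_NAME')}"
```
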

### Runtime Environment

- For single-node deployments, run the container alongside PostgreSQL/Redis using Docker Compose or an equivalent orchestrator.
- A reverse proxy (e.g., Nginx) terminates TLS and forwards traffic to the container on port `8000`.
- Migrations must be applied before rolling out a new image; automation can hook into the deploy step to run `scripts/run_migrations.py`.

### CI/CD Integration

- Gitea Actions workflows reside under `.gitea/workflows/`.
- `test.yml` executes the pytest suite using cached pip dependencies.
- `build-and-push.yml` logs into the container registry, rebuilds the Docker image using GitHub Actions cache-backed layers, and pushes `latest` (and additional tags as required).
- `deploy.yml` connects to the target host via SSH, pulls the pushed tag, stops any existing container, and launches the new version.
- Required secrets: `REGISTRY_URL`, `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
- Extend these workflows when introducing staging/blue-green deployments; keep cross-links with [14 — Testing & CI](14_testing_ci.md) up to date.

## Integrations and Future Work (deployment-related)

- **Persistence of results**: `/api/simulations/run` currently returns in-memory results; the next iteration should persist to `simulation_result` and reference scenarios.
- **Deployment**: implement infrastructure-as-code (e.g., Terraform/Ansible) to provision the hosting environment and maintain parity across dev/stage/prod.

@@ -1,55 +0,0 @@
---
title: "08 — Concepts"
description: "Document key concepts, domain models, and terminology used throughout the architecture documentation."
status: draft
---

# 08 — Concepts

## Key Concepts

### Scenario

A `scenario` represents a distinct mining project configuration, encapsulating all relevant parameters, costs, consumption, production outputs, equipment, maintenance logs, and simulation results. Each scenario is independent, allowing users to model and analyze different mining strategies.

### Parameterization

Parameters are defined for each scenario to capture inputs such as resource consumption rates, production targets, cost factors, and equipment specifications. Parameters can have fixed values or be linked to probability distributions for stochastic simulations.
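
A minimal sketch of how the distribution linkage could drive sampling (the dispatch on `distribution_type` and the parameter keys are illustrative assumptions; the fields mirror the data model described below):

```python
import random


def sample_parameter(base_value: float,
                     distribution_type: str | None,
                     distribution_parameters: dict | None,
                     rng: random.Random) -> float:
    """Return the fixed value, or a draw from the linked distribution."""
    if distribution_type is None:
        return base_value  # fixed-value parameter
    params = distribution_parameters or {}
    if distribution_type == "normal":
        return rng.gauss(params.get("mean", base_value), params.get("stddev", 1.0))
    if distribution_type == "uniform":
        return rng.uniform(params["low"], params["high"])
    raise ValueError(f"Unsupported distribution: {distribution_type}")
```
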
### Monte Carlo Simulation

The Monte Carlo simulation engine allows users to perform risk analysis by running multiple iterations of a scenario with varying input parameters drawn from defined probability distributions. This helps in understanding the range of possible outcomes and their associated probabilities.

## Domain Model

The domain model consists of the following key entities:

- `Scenario`: Represents a mining project configuration.
- `Parameter`: Input values for scenarios, which can be fixed or probabilistic.
- `Cost`: Tracks capital and operational expenditures.
- `Consumption`: Records resource usage.
- `ProductionOutput`: Captures production metrics.
- `Equipment`: Represents mining equipment associated with a scenario.
- `Maintenance`: Logs maintenance events for equipment.
- `SimulationResult`: Stores results from Monte Carlo simulations.
- `Distribution`: Defines probability distributions for stochastic parameters.
- `User`: Represents application users and their roles.
- `Report`: Generated reports summarizing scenario analyses.
- `Dashboard`: Visual representation of key performance indicators and metrics.
- `AuditLog`: Tracks changes and actions performed within the application.
- `Notification`: Alerts and messages related to scenario events and updates.
- `Tag`: Labels for categorizing scenarios and other entities.
- `Attachment`: Files associated with scenarios, such as documents or images.
- `Version`: Tracks different versions of scenarios and their configurations.

## Data Model Highlights

- `scenario`: central entity describing a mining scenario; owns relationships to cost, consumption, production, equipment, and maintenance tables.
- `capex`, `opex`: monetary tracking linked to scenarios.
- `consumption`: resource usage entries parameterized by scenario and description.
- `parameter`: scenario inputs with a base `value` and optional distribution linkage via `distribution_id`, `distribution_type`, and JSON `distribution_parameters` to support simulation sampling.
- `production_output`: production metrics per scenario.
- `equipment` and `maintenance`: equipment inventory and maintenance events with dates/costs.
- `simulation_result`: staging table for future Monte Carlo outputs (not yet populated by `run_simulation`).

Foreign keys enforce referential integrity between domain tables and their scenarios, enabling per-scenario analytics.

@@ -1,5 +0,0 @@
# 09 — Architecture Decisions

Status: skeleton

Record important architectural decisions, their rationale, and alternatives considered.

@@ -1,5 +0,0 @@
# 10 — Quality Requirements

Status: skeleton

List non-functional requirements (performance, scalability, reliability, security) and measurable acceptance criteria.

@@ -1,5 +0,0 @@
# 11 — Technical Risks

Status: skeleton

Document potential technical risks, mitigation strategies, and monitoring suggestions.

@@ -1,5 +0,0 @@
# 12 — Glossary

Status: skeleton

Project glossary and definitions for domain-specific terms.

@@ -1,85 +0,0 @@
# 13 — UI, templates and styling

Status: migrated

This chapter collects UI integration notes, reusable template components, styling audit points, and per-page UI data/actions.

## Reusable Template Components

To reduce duplication across form-centric pages, shared Jinja macros live in `templates/partials/components.html`.

- `select_field(...)`: renders labeled `<select>` controls with consistent placeholder handling and optional preselection. Existing JavaScript modules continue to target the generated IDs, so template calls must pass the same identifiers (`consumption-form-scenario`, etc.).
- `feedback(...)` and `empty_state(...)`: wrap status messages in standard classes (`feedback`, `empty-state`) with optional `hidden` toggles so scripts can control visibility without reimplementing markup.
- `table_container(...)`: provides a semantic wrapper and optional heading around tabular content; the `{% call %}` body supplies the `<thead>`, `<tbody>`, and `<tfoot>` elements while the macro applies the `table-container` class and manages hidden state.

Pages like `templates/consumption.html` and `templates/costs.html` already consume these helpers to keep markup aligned while preserving existing JavaScript selectors.

Import macros via:

```jinja
{% from "partials/components.html" import select_field, feedback, table_container with context %}
```

## Styling Audit Notes (2025-10-21)

- **Spacing**: Panels (`section.panel`) sometimes lack consistent vertical rhythm between headings, form grids, and tables. Extra top/bottom margin utilities would help align content.
- **Typography**: Headings rely on browser defaults; the font-size scale is uneven between `<h2>` and `<h3>`. Define explicit scale tokens (e.g., `--font-size-lg`) for predictable sizing.
- **Forms**: `.form-grid` uses fixed column gaps that collapse on small screens; introduce responsive grid rules to stack gracefully below ~768px.
- **Tables**: `.table-container` wrappers need overflow handling for narrow viewports; consider `overflow-x: auto` with padding adjustments.
- **Feedback/Empty states**: Messages use default font weight and spacing; a utility class for margin/padding would ensure consistent separation from forms or tables.

## Per-page data & actions

Short reference of per-page APIs and primary actions used by templates and scripts.

- Scenarios (`templates/ScenarioForm.html`):

  - Data: `GET /api/scenarios/`
  - Actions: `POST /api/scenarios/`

- Parameters (`templates/ParameterInput.html`):

  - Data: `GET /api/scenarios/`, `GET /api/parameters/`
  - Actions: `POST /api/parameters/`

- Costs (`templates/costs.html`):

  - Data: `GET /api/costs/capex`, `GET /api/costs/opex`
  - Actions: `POST /api/costs/capex`, `POST /api/costs/opex`

- Consumption (`templates/consumption.html`):

  - Data: `GET /api/consumption/`
  - Actions: `POST /api/consumption/`

- Production (`templates/production.html`):

  - Data: `GET /api/production/`
  - Actions: `POST /api/production/`

- Equipment (`templates/equipment.html`):

  - Data: `GET /api/equipment/`
  - Actions: `POST /api/equipment/`

- Maintenance (`templates/maintenance.html`):

  - Data: `GET /api/maintenance/` (pagination support)
  - Actions: `POST /api/maintenance/`, `PUT /api/maintenance/{id}`, `DELETE /api/maintenance/{id}`

- Simulations (`templates/simulations.html`):

  - Data: `GET /api/scenarios/`, `GET /api/parameters/`
  - Actions: `POST /api/simulations/run`

- Reporting (`templates/reporting.html` and `templates/Dashboard.html`):

  - Data: `POST /api/reporting/summary` (accepts arrays of `{ "result": float }` objects; see the request sketch below)
  - Actions: Trigger summary refreshes and export/download actions.
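
A request sketch against the summary endpoint, assuming a local server (the payload shape follows the contract above):

```python
import requests

payload = [{"result": 12.5}, {"result": 9.8}, {"result": 14.1}]
resp = requests.post("http://localhost:8000/api/reporting/summary", json=payload)
resp.raise_for_status()
print(resp.json())  # e.g. mean, variance, percentiles, VaR/ES at 95%
```
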
## UI Template Audit (2025-10-20)

- Existing HTML templates: `ScenarioForm.html`, `ParameterInput.html`, and `Dashboard.html` (reporting summary view).
- Coverage gaps remain for costs, consumption, production, equipment, maintenance, and simulation workflows; no dedicated templates yet.
- Shared layout primitives (navigation/header/footer) are absent; current pages duplicate boilerplate markup.
- Dashboard currently covers reporting metrics but should be wired to a central `/` route once the shared layout lands.
- Next steps: introduce a `base.html`, refactor existing templates to extend it, and scaffold placeholder pages for the remaining features.

@@ -1,117 +0,0 @@
# 14 Testing, CI and Quality Assurance

This chapter centralizes the project's testing strategy, CI configuration, and quality targets.

## Overview

CalMiner uses a combination of unit, integration, and end-to-end tests to ensure quality.

### Frameworks

- Backend: pytest for unit and integration tests.
- Frontend: pytest with Playwright for E2E tests.
- Database: pytest fixtures with psycopg2 for DB tests.

### Test Types

- Unit Tests: Test individual functions/modules.
- Integration Tests: Test API endpoints and DB interactions.
- E2E Tests: Playwright for full user flows.

### CI/CD

- Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`.
- `test.yml` runs on every push with cached Python dependencies via `actions/cache@v3`.
- `build-and-push.yml` builds the Docker image with `docker/build-push-action@v2`, reusing GitHub Actions cache-backed layers, and pushes to the Gitea registry.
- `deploy.yml` connects to the target host (via `appleboy/ssh-action`) to pull the freshly pushed image and restart the container.
- Mandatory secrets: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
- Run tests on pull requests to shared branches; enforce a coverage target of ≥80% (pytest-cov).

### Running Tests

- Unit: `pytest tests/unit/`
- E2E: `pytest tests/e2e/`
- All: `pytest`

### Test Directory Structure

Organize tests under the `tests/` directory, mirroring the application structure:

```text
tests/
  unit/
    test_<module>.py
  e2e/
    test_<flow>.py
  fixtures/
    conftest.py
```

### Fixtures and Test Data

- Define reusable fixtures in `tests/fixtures/conftest.py`.
- Use temporary in-memory databases or isolated schemas for DB tests.
- Load sample data via fixtures for consistent test environments.
- Leverage the `seeded_ui_data` fixture in `tests/unit/conftest.py` to populate scenarios with related cost, maintenance, and simulation records for deterministic UI route checks.

### E2E (Playwright) Tests

The E2E test suite, located in `tests/e2e/`, uses Playwright to simulate user interactions in a live browser environment. These tests are designed to catch issues in the UI, frontend-backend integration, and overall application flow.

#### Fixtures

- `live_server`: A session-scoped fixture that launches the FastAPI application in a separate process, making it accessible to the browser (see the sketch below).
- `playwright_instance`, `browser`, `page`: Standard `pytest-playwright` fixtures for managing the Playwright instance, browser, and individual pages.
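
A minimal sketch of such a fixture, assuming `uvicorn` and the documented port 8001 (the real fixture in `tests/e2e/` may differ):

```python
import multiprocessing
import time

import pytest
import uvicorn


def _serve() -> None:
    uvicorn.run("main:app", host="127.0.0.1", port=8001, log_level="warning")


@pytest.fixture(scope="session")
def live_server():
    # Launch the FastAPI app in a separate process for the browser to target.
    proc = multiprocessing.Process(target=_serve, daemon=True)
    proc.start()
    time.sleep(2)  # crude readiness wait; polling /health is more robust
    yield "http://localhost:8001"
    proc.terminate()
    proc.join()
```
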
#### Smoke Tests

- UI Page Loading: `test_smoke.py` contains a parameterized test that systematically navigates to all UI routes to ensure they load without errors, have the correct title, and display a primary heading (see the sketch below).
- Form Submissions: Each major form in the application has a corresponding test file (e.g., `test_scenarios.py`, `test_costs.py`) that verifies the page loads, an item can be created by filling the form, a success message appears, and the UI updates.
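
A sketch of the parameterized pattern, with an illustrative route subset:

```python
import pytest

ROUTES = ["/", "/scenarios", "/parameters"]  # illustrative subset


@pytest.mark.parametrize("route", ROUTES)
def test_page_loads(page, live_server, route):
    page.goto(f"{live_server}{route}")
    # Every UI page should render a primary heading without server errors.
    assert page.locator("h1").first.is_visible()
```
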
### Running E2E Tests

To run the Playwright tests:

```bash
pytest tests/e2e/
```

To run in headed mode:

```bash
pytest tests/e2e/ --headed
```

### Mocking and Dependency Injection

- Use `unittest.mock` to mock external dependencies.
- Inject dependencies via function parameters or FastAPI's dependency overrides in tests (see the sketch below).
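
A sketch of FastAPI's override mechanism; `get_db` here is a hypothetical dependency name, so the override line is shown commented out:

```python
from fastapi.testclient import TestClient

from main import app
# from config.database import get_db  # hypothetical dependency to override


def override_db():
    yield None  # e.g. an in-memory session instead of a real connection


def test_health_with_override():
    # app.dependency_overrides[get_db] = override_db  # swap the dependency
    client = TestClient(app)
    assert client.get("/health").status_code == 200
    app.dependency_overrides.clear()
```
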
### Code Coverage

- Install `pytest-cov` to generate coverage reports.
- Run with coverage: `pytest --cov --cov-report=term` (use `--cov-report=html` when visualizing hotspots).
- Target 95%+ overall coverage. Focus on historically low modules: `services/simulation.py`, `services/reporting.py`, `middleware/validation.py`, and `routes/ui.py`.
- Latest snapshot (2025-10-21): `pytest --cov=. --cov-report=term-missing` reports **91%** overall coverage.

### CI Integration

`test.yml` encapsulates the steps below:

- Check out the repository and set up Python 3.10.
- Restore the pip cache (keyed by `requirements.txt`).
- Install project dependencies and Playwright browsers (if needed for E2E).
- Run `pytest` (extend with `--cov` flags when enforcing coverage).

`build-and-push.yml` adds:

- Registry login using repository secrets.
- Docker image build/push with GHA cache storage (`cache-from`/`cache-to` set to `type=gha`).

`deploy.yml` handles:

- SSH into the deployment host.
- Pull the tagged image from the registry.
- Stop, remove, and relaunch the `calminer` container exposing port 8000.

When adding new workflows, mirror this structure to ensure secrets, caching, and deployment steps remain aligned with the production environment.

@@ -1,77 +0,0 @@
# 15 Development Setup Guide

This document outlines the local development environment and the steps to get the project running.

## Prerequisites

- Python (version 3.10+)
- PostgreSQL (version 13+)
- Git

## Clone and Project Setup

```powershell
# Clone the repository
git clone https://git.allucanget.biz/allucanget/calminer.git
cd calminer
```

## Virtual Environment

```powershell
# Create and activate a virtual environment
python -m venv .venv
.\.venv\Scripts\Activate.ps1
```

## Install Dependencies

```powershell
pip install -r requirements.txt
```

## Database Setup

1. Create the database user:

   ```sql
   CREATE USER calminer_user WITH PASSWORD 'your_password';
   ```

1. Create the database:

   ```sql
   CREATE DATABASE calminer;
   ```

## Environment Variables

1. Copy `.env.example` to `.env` at the project root.
1. Edit `.env` to set database connection details:

   ```dotenv
   DATABASE_DRIVER=postgresql
   DATABASE_HOST=localhost
   DATABASE_PORT=5432
   DATABASE_USER=calminer_user
   DATABASE_PASSWORD=your_password
   DATABASE_NAME=calminer
   DATABASE_SCHEMA=public
   ```

1. The application uses `python-dotenv` to load these variables. A legacy `DATABASE_URL` value is still accepted if the granular keys are omitted.

## Running the Application

```powershell
# Start the FastAPI server
uvicorn main:app --reload
```

## Testing

```powershell
pytest
```

E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.
@@ -1,26 +0,0 @@
---
title: "CalMiner Architecture Documentation"
description: "arc42-based architecture documentation for the CalMiner project"
---

# Architecture documentation (arc42 mapping)

This folder mirrors the arc42 chapter structure (adapted to Markdown).

## Files

- [01 Introduction and Goals](01_introduction_and_goals.md)
- [02 Architecture Constraints](02_architecture_constraints.md)
- [03 Context and Scope](03_context_and_scope.md)
- [04 Solution Strategy](04_solution_strategy.md)
- [05 Building Block View](05_building_block_view.md)
- [06 Runtime View](06_runtime_view.md)
- [07 Deployment View](07_deployment_view.md)
- [08 Concepts](08_concepts.md)
- [09 Architecture Decisions](09_architecture_decisions.md)
- [10 Quality Requirements](10_quality_requirements.md)
- [11 Technical Risks](11_technical_risks.md)
- [12 Glossary](12_glossary.md)
- [13 UI and Style](13_ui_and_style.md)
- [14 Testing & CI](14_testing_ci.md)
- [15 Development Setup](15_development_setup.md)

@@ -1,116 +0,0 @@
# Quickstart & Expanded Project Documentation

This document contains the expanded development, usage, testing, and migration guidance moved out of the top-level README for brevity.

## Development

To get started locally:

```powershell
# Clone the repository
git clone https://git.allucanget.biz/allucanget/calminer.git
cd calminer

# Create and activate a virtual environment
python -m venv .venv
.\.venv\Scripts\Activate.ps1

# Install dependencies
pip install -r requirements.txt

# Start the development server
uvicorn main:app --reload
```

## Docker-based setup

To build and run the application using Docker instead of a local Python environment:

```powershell
# Build the application image (multi-stage build keeps runtime small)
docker build -t calminer:latest .

# Start the container on port 8000
docker run --rm -p 8000:8000 calminer:latest

# Supply environment variables (e.g., Postgres connection)
docker run --rm -p 8000:8000 ^
  -e DATABASE_DRIVER="postgresql" ^
  -e DATABASE_HOST="db.host" ^
  -e DATABASE_PORT="5432" ^
  -e DATABASE_USER="calminer" ^
  -e DATABASE_PASSWORD="s3cret" ^
  -e DATABASE_NAME="calminer" ^
  -e DATABASE_SCHEMA="public" ^
  calminer:latest
```

If you maintain a Postgres or Redis dependency locally, consider authoring a `docker compose` stack that pairs them with the app container. The Docker image expects the database to be reachable and migrations to have been executed before serving traffic.

## Usage Overview

- **API base URL**: `http://localhost:8000/api`
- Key routes include creating scenarios, parameters, costs, consumption, production, equipment, maintenance, and reporting summaries. See the `routes/` directory for full details.

## Dashboard Preview

1. Start the FastAPI server and navigate to `/`.
2. Review the headline metrics, scenario snapshot table, and cost/activity charts sourced from the current database state.
3. Use the "Refresh Dashboard" button to pull freshly aggregated data via `/ui/dashboard/data` without reloading the page.

## Testing

Run the unit test suite:

```powershell
pytest
```

E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.

## Migrations & Currency Backfill

The project includes a referential `currency` table and migration/backfill tooling to normalize legacy currency fields.

### Run migrations and backfill (development)

Configure the granular database settings in your PowerShell session before running migrations.

```powershell
$env:DATABASE_DRIVER = 'postgresql'
$env:DATABASE_HOST = 'localhost'
$env:DATABASE_PORT = '5432'
$env:DATABASE_USER = 'calminer'
$env:DATABASE_PASSWORD = 's3cret'
$env:DATABASE_NAME = 'calminer'
$env:DATABASE_SCHEMA = 'public'
python scripts/run_migrations.py
python scripts/backfill_currency.py --dry-run
python scripts/backfill_currency.py --create-missing
```

> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.

Use `--dry-run` first to verify what will change.

## Database Objects

The database contains tables such as `capex`, `opex`, `chemical_consumption`, `fuel_consumption`, `water_consumption`, `scrap_consumption`, `production_output`, `equipment_operation`, `ore_batch`, `exchange_rate`, and `simulation_result`.

## Current implementation status (2025-10-21)

- Currency normalization: a `currency` table and backfill scripts exist; routes accept `currency_id` and `currency_code` for compatibility.
- Simulation engine: scaffolding in `services/simulation.py` and `/api/simulations/run` return in-memory results; persistence to `models/simulation_result` is planned.
- Reporting: `services/reporting.py` provides the summary statistics used by `POST /api/reporting/summary`.
- Tests & coverage: unit and E2E suites exist; recent local coverage is >90%.
- Remaining work: authentication, persisting simulation runs, CI/CD, and containerization.

## Where to look next

- Architecture overview & chapters: [architecture](architecture/README.md) (per-chapter files under `docs/architecture/`)
- [Testing & CI](architecture/14_testing_ci.md)
- [Development setup](architecture/15_development_setup.md)
- Implementation plan & roadmap: [Solution strategy](architecture/04_solution_strategy.md)
- Routes: [routes](../routes/)
- Services: [services](../services/)
- Scripts: [scripts](../scripts/) (migrations and backfills)
14 k8s/configmap.yaml Normal file
@@ -0,0 +1,14 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: calminer-config
data:
  DATABASE_HOST: "calminer-db"
  DATABASE_PORT: "5432"
  DATABASE_USER: "calminer"
  DATABASE_NAME: "calminer_db"
  DATABASE_DRIVER: "postgresql"
  CALMINER_EXPORT_MAX_ROWS: "10000"
  CALMINER_EXPORT_METADATA: "true"
  CALMINER_IMPORT_STAGING_TTL: "300"
  CALMINER_IMPORT_MAX_ROWS: "50000"

54 k8s/deployment.yaml Normal file
@@ -0,0 +1,54 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: calminer-app
  labels:
    app: calminer
spec:
  replicas: 3
  selector:
    matchLabels:
      app: calminer
  template:
    metadata:
      labels:
        app: calminer
    spec:
      containers:
        - name: calminer
          image: registry.example.com/calminer:latest
          ports:
            - containerPort: 8003
          envFrom:
            - configMapRef:
                name: calminer-config
            - secretRef:
                name: calminer-secrets
          resources:
            requests:
              memory: "256Mi"
              cpu: "250m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          livenessProbe:
            httpGet:
              path: /health
              port: 8003
            initialDelaySeconds: 30
            periodSeconds: 10
          readinessProbe:
            httpGet:
              path: /health
              port: 8003
            initialDelaySeconds: 5
            periodSeconds: 5
      initContainers:
        - name: wait-for-db
          image: postgres:17
          command:
            [
              "sh",
              "-c",
              "until pg_isready -h calminer-db -p 5432; do echo waiting for database; sleep 2; done;",
            ]

18 k8s/ingress.yaml Normal file
@@ -0,0 +1,18 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: calminer-ingress
  annotations:
    nginx.ingress.kubernetes.io/rewrite-target: /
spec:
  rules:
    - host: calminer.example.com
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: calminer-service
                port:
                  number: 80

13 k8s/postgres-service.yaml Normal file
@@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
  name: calminer-db
  labels:
    app: calminer-db
spec:
  selector:
    app: calminer-db
  ports:
    - port: 5432
      targetPort: 5432
  clusterIP: None # Headless service for StatefulSet

48 k8s/postgres.yaml Normal file
@@ -0,0 +1,48 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: calminer-db
spec:
  serviceName: calminer-db
  replicas: 1
  selector:
    matchLabels:
      app: calminer-db
  template:
    metadata:
      labels:
        app: calminer-db
    spec:
      containers:
        - name: postgres
          image: postgres:17
          ports:
            - containerPort: 5432
          env:
            - name: POSTGRES_USER
              value: "calminer"
            - name: POSTGRES_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: calminer-secrets
                  key: DATABASE_PASSWORD
            - name: POSTGRES_DB
              value: "calminer_db"
          resources:
            requests:
              memory: "256Mi"
              cpu: "250m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          volumeMounts:
            - name: postgres-storage
              mountPath: /var/lib/postgresql/data
  volumeClaimTemplates:
    - metadata:
        name: postgres-storage
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 10Gi

8 k8s/secret.yaml Normal file
@@ -0,0 +1,8 @@
apiVersion: v1
kind: Secret
metadata:
  name: calminer-secrets
type: Opaque
data:
  DATABASE_PASSWORD: Y2FsbWluZXJfcGFzc3dvcmQ= # base64 encoded 'calminer_password'
  CALMINER_SEED_ADMIN_PASSWORD: Q2hhbmdlTWUxMjMh # base64 encoded 'ChangeMe123!'

14 k8s/service.yaml Normal file
@@ -0,0 +1,14 @@
apiVersion: v1
kind: Service
metadata:
  name: calminer-service
  labels:
    app: calminer
spec:
  selector:
    app: calminer
  ports:
    - port: 80
      targetPort: 8003
      protocol: TCP
  type: ClusterIP
125 main.py
@@ -1,25 +1,88 @@
from routes.distributions import router as distributions_router
from routes.ui import router as ui_router
from routes.parameters import router as parameters_router
import logging
from contextlib import asynccontextmanager
from typing import Awaitable, Callable

from fastapi import FastAPI, Request, Response
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse

from config.settings import get_settings
from middleware.auth_session import AuthSessionMiddleware
from middleware.metrics import MetricsMiddleware
from middleware.validation import validate_json
from config.database import Base, engine
from routes.auth import router as auth_router
from routes.dashboard import router as dashboard_router
from routes.calculations import router as calculations_router
from routes.imports import router as imports_router
from routes.exports import router as exports_router
from routes.projects import router as projects_router
from routes.reports import router as reports_router
from routes.scenarios import router as scenarios_router
from routes.costs import router as costs_router
from routes.consumption import router as consumption_router
from routes.production import router as production_router
from routes.equipment import router as equipment_router
from routes.reporting import router as reporting_router
from routes.simulations import router as simulations_router
from routes.maintenance import router as maintenance_router
from routes.ui import router as ui_router
from routes.navigation import router as navigation_router
from monitoring import router as monitoring_router
from services.bootstrap import bootstrap_admin, bootstrap_pricing_settings
from scripts.init_db import init_db as init_db_script

# Initialize database schema
Base.metadata.create_all(bind=engine)
logger = logging.getLogger(__name__)

app = FastAPI()


async def _bootstrap_startup() -> None:
    settings = get_settings()
    admin_settings = settings.admin_bootstrap_settings()
    pricing_metadata = settings.pricing_metadata()
    try:
        try:
            init_db_script()
        except Exception:
            logger.exception(
                "DB initializer failed; continuing to bootstrap (non-fatal)")

        role_result, admin_result = bootstrap_admin(settings=admin_settings)
        pricing_result = bootstrap_pricing_settings(metadata=pricing_metadata)
        logger.info(
            "Admin bootstrap completed: roles=%s created=%s updated=%s rotated=%s assigned=%s",
            role_result.ensured,
            admin_result.created_user,
            admin_result.updated_user,
            admin_result.password_rotated,
            admin_result.roles_granted,
        )
        try:
            seed = pricing_result.seed
            slug = getattr(seed.settings, "slug", None) if seed and getattr(
                seed, "settings", None) else None
            created = getattr(seed, "created", None)
            updated_fields = getattr(seed, "updated_fields", None)
            impurity_upserts = getattr(seed, "impurity_upserts", None)
            logger.info(
                "Pricing settings bootstrap completed: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
                slug,
                created,
                updated_fields,
                impurity_upserts,
                pricing_result.projects_assigned,
            )
        except Exception:
            logger.info(
                "Pricing settings bootstrap completed (partial): projects_assigned=%s",
                pricing_result.projects_assigned,
            )
    except Exception:  # pragma: no cover - defensive logging
        logger.exception(
            "Failed to bootstrap administrator or pricing settings")


@asynccontextmanager
async def app_lifespan(_: FastAPI):
    await _bootstrap_startup()
    yield


app = FastAPI(lifespan=app_lifespan)

app.add_middleware(AuthSessionMiddleware)
app.add_middleware(MetricsMiddleware)


@app.middleware("http")
@@ -28,17 +91,29 @@ async def json_validation(
) -> Response:
    return await validate_json(request, call_next)

app.mount("/static", StaticFiles(directory="static"), name="static")

# Include API routers
@app.get("/health", summary="Container health probe")
async def health() -> dict[str, str]:
    return {"status": "ok"}


@app.get("/favicon.ico", include_in_schema=False)
async def favicon() -> Response:
    static_directory = "static"
    favicon_img = "favicon.ico"
    return FileResponse(f"{static_directory}/{favicon_img}")


app.include_router(dashboard_router)
app.include_router(calculations_router)
app.include_router(auth_router)
app.include_router(imports_router)
app.include_router(exports_router)
app.include_router(projects_router)
app.include_router(scenarios_router)
app.include_router(parameters_router)
app.include_router(distributions_router)
app.include_router(costs_router)
app.include_router(consumption_router)
app.include_router(simulations_router)
app.include_router(production_router)
app.include_router(equipment_router)
app.include_router(maintenance_router)
app.include_router(reporting_router)
app.include_router(reports_router)
app.include_router(ui_router)
app.include_router(monitoring_router)
app.include_router(navigation_router)

app.mount("/static", StaticFiles(directory="static"), name="static")
218 middleware/auth_session.py Normal file
@@ -0,0 +1,218 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Callable, Iterable, Optional

from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.types import ASGIApp

from config.settings import Settings, get_settings
from sqlalchemy.orm.exc import DetachedInstanceError
from models import User
from monitoring.metrics import ACTIVE_CONNECTIONS
from services.exceptions import EntityNotFoundError
from services.security import (
    JWTSettings,
    TokenDecodeError,
    TokenError,
    TokenExpiredError,
    TokenTypeMismatchError,
    create_access_token,
    create_refresh_token,
    decode_access_token,
    decode_refresh_token,
)
from services.session import (
    AuthSession,
    SessionStrategy,
    SessionTokens,
    build_session_strategy,
    clear_session_cookies,
    extract_session_tokens,
    set_session_cookies,
)
from services.unit_of_work import UnitOfWork

_AUTH_SCOPE = "auth"


@dataclass(slots=True)
class _ResolutionResult:
    session: AuthSession
    strategy: SessionStrategy
    jwt_settings: JWTSettings


class AuthSessionMiddleware(BaseHTTPMiddleware):
    """Resolve authenticated users from session cookies and refresh tokens."""

    _active_sessions: int = 0

    def __init__(
        self,
        app: ASGIApp,
        *,
        settings_provider: Callable[[], Settings] = get_settings,
        unit_of_work_factory: Callable[[], UnitOfWork] = UnitOfWork,
        refresh_scopes: Iterable[str] | None = None,
    ) -> None:
        super().__init__(app)
        self._settings_provider = settings_provider
        self._unit_of_work_factory = unit_of_work_factory
        self._refresh_scopes = tuple(
            refresh_scopes) if refresh_scopes else (_AUTH_SCOPE,)

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        resolved = self._resolve_session(request)

        # Track active sessions for authenticated users
        try:
            user_active = bool(resolved.session.user and getattr(
                resolved.session.user, "is_active", False))
        except DetachedInstanceError:
            user_active = False

        if user_active:
            AuthSessionMiddleware._active_sessions += 1
            ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)

        response: Response | None = None
        try:
            response = await call_next(request)
            return response
        finally:
            # Always decrement the active sessions counter if we incremented it.
            if user_active:
                AuthSessionMiddleware._active_sessions = max(
                    0, AuthSessionMiddleware._active_sessions - 1)
                ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)

            # Only apply session cookies if a response was produced by downstream
            # application. If an exception occurred before a response was created
            # we avoid raising another error here.
            import logging
            if response is not None:
                try:
                    self._apply_session(response, resolved)
                except Exception:
                    logging.getLogger(__name__).exception(
                        "Failed to apply session cookies to response"
                    )
            else:
                logging.getLogger(__name__).debug(
                    "AuthSessionMiddleware: no response produced by downstream app (response is None)"
                )

    def _resolve_session(self, request: Request) -> _ResolutionResult:
        settings = self._settings_provider()
        jwt_settings = settings.jwt_settings()
        strategy = build_session_strategy(settings.session_settings())

        tokens = extract_session_tokens(request, strategy)
        session = AuthSession(tokens=tokens)
        request.state.auth_session = session

        if tokens.access_token:
            if self._try_access_token(session, tokens, jwt_settings):
                return _ResolutionResult(session=session, strategy=strategy, jwt_settings=jwt_settings)

        if tokens.refresh_token:
            self._try_refresh_token(
                session, tokens.refresh_token, jwt_settings)

        return _ResolutionResult(session=session, strategy=strategy, jwt_settings=jwt_settings)

    def _try_access_token(
        self,
        session: AuthSession,
        tokens: SessionTokens,
        jwt_settings: JWTSettings,
    ) -> bool:
        try:
            payload = decode_access_token(
                tokens.access_token or "", jwt_settings)
        except TokenExpiredError:
            return False
        except (TokenDecodeError, TokenTypeMismatchError, TokenError):
            session.mark_cleared()
            return False

        user = self._load_user(payload.sub)
        if not user or not user.is_active or _AUTH_SCOPE not in payload.scopes:
            session.mark_cleared()
            return False

        session.user = user
        session.scopes = tuple(payload.scopes)
        session.set_role_slugs(role.name for role in getattr(user, "roles", []) if role)
        return True

    def _try_refresh_token(
        self,
        session: AuthSession,
        refresh_token: str,
        jwt_settings: JWTSettings,
    ) -> None:
        try:
            payload = decode_refresh_token(refresh_token, jwt_settings)
        except (TokenExpiredError, TokenDecodeError, TokenTypeMismatchError, TokenError):
            session.mark_cleared()
            return

        user = self._load_user(payload.sub)
        if not user or not user.is_active or not self._is_refresh_scope_allowed(payload.scopes):
            session.mark_cleared()
            return

        session.user = user
        session.scopes = tuple(payload.scopes)
        session.set_role_slugs(role.name for role in getattr(user, "roles", []) if role)

        access_token = create_access_token(
            str(user.id),
            jwt_settings,
            scopes=payload.scopes,
        )
        new_refresh = create_refresh_token(
            str(user.id),
            jwt_settings,
            scopes=payload.scopes,
        )
        session.issue_tokens(access_token=access_token,
                             refresh_token=new_refresh)

    def _is_refresh_scope_allowed(self, scopes: Iterable[str]) -> bool:
        candidate_scopes = set(scopes)
        return any(scope in candidate_scopes for scope in self._refresh_scopes)

    def _load_user(self, subject: str) -> Optional[User]:
        try:
            user_id = int(subject)
        except ValueError:
            return None

        with self._unit_of_work_factory() as uow:
            if not uow.users:
                return None
            try:
                user = uow.users.get(user_id, with_roles=True)
            except EntityNotFoundError:
                return None
            return user

    def _apply_session(self, response: Response, resolved: _ResolutionResult) -> None:
        session = resolved.session
        if session.clear_cookies:
            clear_session_cookies(response, resolved.strategy)
            return

        if session.issued_access_token:
            refresh_token = session.issued_refresh_token or session.tokens.refresh_token
            set_session_cookies(
                response,
                access_token=session.issued_access_token,
                refresh_token=refresh_token,
                strategy=resolved.strategy,
                jwt_settings=resolved.jwt_settings,
            )
58 middleware/metrics.py Normal file
@@ -0,0 +1,58 @@
from __future__ import annotations

import time
from typing import Callable

from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware

from monitoring.metrics import observe_request
from services.metrics import get_metrics_service


class MetricsMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next: Callable[[Request], Response]) -> Response:
        start_time = time.time()
        response = await call_next(request)
        process_time = time.time() - start_time

        observe_request(
            method=request.method,
            endpoint=request.url.path,
            status=response.status_code,
            seconds=process_time,
        )

        # Store in database asynchronously
        background_tasks = getattr(request.state, "background_tasks", None)
        if background_tasks:
            background_tasks.add_task(
                store_request_metric,
                method=request.method,
                endpoint=request.url.path,
                status_code=response.status_code,
                duration_seconds=process_time,
            )

        return response


async def store_request_metric(
    method: str, endpoint: str, status_code: int, duration_seconds: float
) -> None:
    """Store request metric in database."""
    try:
        service = get_metrics_service()
        service.store_metric(
            metric_name="http_request",
            value=duration_seconds,
            labels={"method": method, "endpoint": endpoint,
                    "status": status_code},
            endpoint=endpoint,
            method=method,
            status_code=status_code,
            duration_seconds=duration_seconds,
        )
    except Exception:
        # Log error but don't fail the request
        pass
@@ -4,13 +4,20 @@ from fastapi import HTTPException, Request, Response

MiddlewareCallNext = Callable[[Request], Awaitable[Response]]


async def validate_json(request: Request, call_next: MiddlewareCallNext) -> Response:

async def validate_json(
    request: Request, call_next: MiddlewareCallNext
) -> Response:
    # Only validate JSON for requests with a body
    if request.method in ("POST", "PUT", "PATCH"):
        try:
            # attempt to parse json body
            await request.json()
        except Exception:
            raise HTTPException(status_code=400, detail="Invalid JSON payload")
        # Only attempt JSON parsing when the client indicates a JSON content type.
        content_type = (request.headers.get("content-type") or "").lower()
        if "json" in content_type:
            try:
                # attempt to parse json body
                await request.json()
            except Exception:
                raise HTTPException(
                    status_code=400, detail="Invalid JSON payload")
    response = await call_next(request)
    return response
@@ -1,5 +1,72 @@
"""
models package initializer. Import the currency model so it's registered
with the shared Base.metadata when the package is imported by tests.
"""
from . import currency  # noqa: F401
"""Database models and shared metadata for the CalMiner domain."""

from .financial_input import FinancialInput
from .metadata import (
    COST_BUCKET_METADATA,
    RESOURCE_METADATA,
    STOCHASTIC_VARIABLE_METADATA,
    ResourceDescriptor,
    StochasticVariableDescriptor,
)
from .performance_metric import PerformanceMetric
from .pricing_settings import (
    PricingImpuritySettings,
    PricingMetalSettings,
    PricingSettings,
)
from .enums import (
    CostBucket,
    DistributionType,
    FinancialCategory,
    MiningOperationType,
    ResourceType,
    ScenarioStatus,
    StochasticVariable,
)
from .project import Project
from .scenario import Scenario
from .simulation_parameter import SimulationParameter
from .user import Role, User, UserRole, password_context
from .navigation import NavigationGroup, NavigationLink

from .profitability_snapshot import ProjectProfitability, ScenarioProfitability
from .capex_snapshot import ProjectCapexSnapshot, ScenarioCapexSnapshot
from .opex_snapshot import (
    ProjectOpexSnapshot,
    ScenarioOpexSnapshot,
)

__all__ = [
    "FinancialCategory",
    "FinancialInput",
    "MiningOperationType",
    "Project",
    "ProjectProfitability",
    "ProjectCapexSnapshot",
    "ProjectOpexSnapshot",
    "PricingSettings",
    "PricingMetalSettings",
    "PricingImpuritySettings",
    "Scenario",
    "ScenarioProfitability",
    "ScenarioCapexSnapshot",
    "ScenarioOpexSnapshot",
    "ScenarioStatus",
    "DistributionType",
    "SimulationParameter",
    "ResourceType",
    "CostBucket",
    "StochasticVariable",
    "RESOURCE_METADATA",
    "COST_BUCKET_METADATA",
    "STOCHASTIC_VARIABLE_METADATA",
    "ResourceDescriptor",
    "StochasticVariableDescriptor",
    "User",
    "Role",
    "UserRole",
    "password_context",
    "PerformanceMetric",
    "NavigationGroup",
    "NavigationLink",
]
@@ -1,65 +0,0 @@
from sqlalchemy import event, text
from sqlalchemy import Column, Integer, Float, String, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class Capex(Base):
    __tablename__ = "capex"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    currency_id = Column(Integer, ForeignKey("currency.id"), nullable=False)

    scenario = relationship("Scenario", back_populates="capex_items")
    currency = relationship("Currency", back_populates="capex_items")

    def __repr__(self):
        return (
            f"<Capex id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} currency_id={self.currency_id}>"
        )

    @property
    def currency_code(self) -> str:
        return self.currency.code if self.currency else None

    @currency_code.setter
    def currency_code(self, value: str) -> None:
        # store pending code so application code or migrations can pick it up
        setattr(self, "_currency_code_pending",
                (value or "USD").strip().upper())


# SQLAlchemy event handlers to ensure currency_id is set before insert/update


def _resolve_currency(mapper, connection, target):
    # If currency_id already set, nothing to do
    if getattr(target, "currency_id", None):
        return
    code = getattr(target, "_currency_code_pending", None) or "USD"
    # Try to find existing currency id
    row = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
        "code": code}).fetchone()
    if row:
        cid = row[0]
    else:
        # Insert new currency and attempt to get lastrowid
        res = connection.execute(
            text("INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"),
            {"code": code, "name": code, "symbol": None, "active": True},
        )
        try:
            cid = res.lastrowid
        except Exception:
            # fallback: select after insert
            cid = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
                "code": code}).scalar()
    target.currency_id = cid


event.listen(Capex, "before_insert", _resolve_currency)
event.listen(Capex, "before_update", _resolve_currency)
111 models/capex_snapshot.py Normal file
@@ -0,0 +1,111 @@
from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import JSON, DateTime, ForeignKey, Integer, Numeric, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

if TYPE_CHECKING:  # pragma: no cover
    from .project import Project
    from .scenario import Scenario
    from .user import User


class ProjectCapexSnapshot(Base):
    """Snapshot of aggregated capex metrics at the project level."""

    __tablename__ = "project_capex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    total_capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    contingency_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    contingency_amount: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    total_with_contingency: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="capex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectCapexSnapshot(id={id!r}, project_id={project_id!r}, total_capex={total_capex!r})".format(
                id=self.id, project_id=self.project_id, total_capex=self.total_capex
            )
        )


class ScenarioCapexSnapshot(Base):
    """Snapshot of capex metrics for an individual scenario."""

    __tablename__ = "scenario_capex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    total_capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    contingency_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    contingency_amount: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    total_with_contingency: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="capex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")
|
||||
|
||||
def __repr__(self) -> str: # pragma: no cover
|
||||
return (
|
||||
"ScenarioCapexSnapshot(id={id!r}, scenario_id={scenario_id!r}, total_capex={total_capex!r})".format(
|
||||
id=self.id, scenario_id=self.scenario_id, total_capex=self.total_capex
|
||||
)
|
||||
)
|
||||

@@ -1,22 +0,0 @@
from sqlalchemy import Column, Integer, Float, String, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class Consumption(Base):
    __tablename__ = "consumption"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    unit_name = Column(String(64), nullable=True)
    unit_symbol = Column(String(16), nullable=True)

    scenario = relationship("Scenario", back_populates="consumption_items")

    def __repr__(self):
        return (
            f"<Consumption id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} unit={self.unit_symbol or self.unit_name}>"
        )

@@ -1,21 +0,0 @@
from sqlalchemy import Column, Integer, String, Boolean
from sqlalchemy.orm import relationship
from config.database import Base


class Currency(Base):
    __tablename__ = "currency"

    id = Column(Integer, primary_key=True, index=True)
    code = Column(String(3), nullable=False, unique=True, index=True)
    name = Column(String(128), nullable=False)
    symbol = Column(String(8), nullable=True)
    is_active = Column(Boolean, nullable=False, default=True)

    # reverse relationships (optional)
    capex_items = relationship(
        "Capex", back_populates="currency", lazy="select")
    opex_items = relationship("Opex", back_populates="currency", lazy="select")

    def __repr__(self):
        return f"<Currency code={self.code} name={self.name} symbol={self.symbol}>"

@@ -1,14 +0,0 @@
from sqlalchemy import Column, Integer, String, JSON
from config.database import Base


class Distribution(Base):
    __tablename__ = "distribution"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, nullable=False)
    distribution_type = Column(String, nullable=False)
    parameters = Column(JSON, nullable=True)

    def __repr__(self):
        return f"<Distribution id={self.id} name={self.name} type={self.distribution_type}>"

96 models/enums.py Normal file
@@ -0,0 +1,96 @@
from __future__ import annotations

from enum import Enum
from typing import Type

from sqlalchemy import Enum as SQLEnum


def sql_enum(enum_cls: Type[Enum], *, name: str) -> SQLEnum:
    """Build a SQLAlchemy Enum that maps using the enum member values."""

    return SQLEnum(
        enum_cls,
        name=name,
        create_type=False,
        validate_strings=True,
        values_callable=lambda enum_cls: [member.value for member in enum_cls],
    )


class MiningOperationType(str, Enum):
    """Supported mining operation categories."""

    OPEN_PIT = "open_pit"
    UNDERGROUND = "underground"
    IN_SITU_LEACH = "in_situ_leach"
    PLACER = "placer"
    QUARRY = "quarry"
    MOUNTAINTOP_REMOVAL = "mountaintop_removal"
    OTHER = "other"


class ScenarioStatus(str, Enum):
    """Lifecycle states for project scenarios."""

    DRAFT = "draft"
    ACTIVE = "active"
    ARCHIVED = "archived"


class FinancialCategory(str, Enum):
    """Enumeration of cost and revenue classifications."""

    CAPITAL_EXPENDITURE = "capex"
    OPERATING_EXPENDITURE = "opex"
    REVENUE = "revenue"
    CONTINGENCY = "contingency"
    OTHER = "other"


class DistributionType(str, Enum):
    """Supported stochastic distribution families for simulations."""

    NORMAL = "normal"
    TRIANGULAR = "triangular"
    UNIFORM = "uniform"
    LOGNORMAL = "lognormal"
    CUSTOM = "custom"


class ResourceType(str, Enum):
    """Primary consumables and resources used in mining operations."""

    DIESEL = "diesel"
    ELECTRICITY = "electricity"
    WATER = "water"
    EXPLOSIVES = "explosives"
    REAGENTS = "reagents"
    LABOR = "labor"
    EQUIPMENT_HOURS = "equipment_hours"
    TAILINGS_CAPACITY = "tailings_capacity"


class CostBucket(str, Enum):
    """Granular cost buckets aligned with project accounting."""

    CAPITAL_INITIAL = "capital_initial"
    CAPITAL_SUSTAINING = "capital_sustaining"
    OPERATING_FIXED = "operating_fixed"
    OPERATING_VARIABLE = "operating_variable"
    MAINTENANCE = "maintenance"
    RECLAMATION = "reclamation"
    ROYALTIES = "royalties"
    GENERAL_ADMIN = "general_admin"


class StochasticVariable(str, Enum):
    """Domain variables that typically require probabilistic modelling."""

    ORE_GRADE = "ore_grade"
    RECOVERY_RATE = "recovery_rate"
    METAL_PRICE = "metal_price"
    OPERATING_COST = "operating_cost"
    CAPITAL_COST = "capital_cost"
    DISCOUNT_RATE = "discount_rate"
    THROUGHPUT = "throughput"
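
Because of `values_callable`, the database-side labels are the enum member values rather than the Python member names. A small sketch:

    col_type = sql_enum(MiningOperationType, name="miningoperationtype")
    # Enum.enums holds the persisted labels:
    print(col_type.enums)   # e.g. ['open_pit', 'underground', 'in_situ_leach', ...]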

@@ -1,17 +0,0 @@
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class Equipment(Base):
    __tablename__ = "equipment"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    name = Column(String, nullable=False)
    description = Column(String, nullable=True)

    scenario = relationship("Scenario", back_populates="equipment_items")

    def __repr__(self):
        return f"<Equipment id={self.id} scenario_id={self.scenario_id} name={self.name}>"

62 models/financial_input.py Normal file
@@ -0,0 +1,62 @@
from __future__ import annotations

from datetime import date, datetime
from typing import TYPE_CHECKING

from sqlalchemy import (
    Date,
    DateTime,
    ForeignKey,
    Integer,
    Numeric,
    String,
    Text,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
from sqlalchemy.sql import func

from config.database import Base
from .enums import CostBucket, FinancialCategory, sql_enum
from services.currency import normalise_currency

if TYPE_CHECKING:  # pragma: no cover
    from .scenario import Scenario


class FinancialInput(Base):
    """Line-item financial assumption attached to a scenario."""

    __tablename__ = "financial_inputs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    category: Mapped[FinancialCategory] = mapped_column(
        sql_enum(FinancialCategory, name="financialcategory"), nullable=False
    )
    cost_bucket: Mapped[CostBucket | None] = mapped_column(
        sql_enum(CostBucket, name="costbucket"), nullable=True
    )
    amount: Mapped[float] = mapped_column(Numeric(18, 2), nullable=False)
    currency: Mapped[str | None] = mapped_column(String(3), nullable=True)
    effective_date: Mapped[date | None] = mapped_column(Date, nullable=True)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped["Scenario"] = relationship(
        "Scenario", back_populates="financial_inputs")

    @validates("currency")
    def _validate_currency(self, key: str, value: str | None) -> str | None:
        return normalise_currency(value)

    def __repr__(self) -> str:  # pragma: no cover
        return f"FinancialInput(id={self.id!r}, scenario_id={self.scenario_id!r}, name={self.name!r})"
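
A sketch of the `@validates` hook, assuming `normalise_currency` uppercases valid ISO-4217 codes (its exact behaviour lives in services.currency):

    fi = FinancialInput(scenario_id=1, name="Crusher rebuild",
                        category=FinancialCategory.CAPITAL_EXPENDITURE,
                        amount=120_000)
    fi.currency = "usd"   # assignment is intercepted by _validate_currency
    print(fi.currency)    # expected: "USD"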

31 models/import_export_log.py Normal file
@@ -0,0 +1,31 @@
from __future__ import annotations

from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text
from sqlalchemy.sql import func

from config.database import Base


class ImportExportLog(Base):
    """Audit log for import and export operations."""

    __tablename__ = "import_export_logs"

    id = Column(Integer, primary_key=True, index=True)
    action = Column(String(32), nullable=False)  # preview, commit, export
    dataset = Column(String(32), nullable=False)  # projects, scenarios, etc.
    status = Column(String(16), nullable=False)  # success, failure
    filename = Column(String(255), nullable=True)
    row_count = Column(Integer, nullable=True)
    detail = Column(Text, nullable=True)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True)
    created_at = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )

    def __repr__(self) -> str:  # pragma: no cover
        return (
            f"ImportExportLog(id={self.id}, action={self.action}, "
            f"dataset={self.dataset}, status={self.status})"
        )

@@ -1,23 +0,0 @@
from sqlalchemy import Column, Date, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from config.database import Base


class Maintenance(Base):
    __tablename__ = "maintenance"

    id = Column(Integer, primary_key=True, index=True)
    equipment_id = Column(Integer, ForeignKey("equipment.id"), nullable=False)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    maintenance_date = Column(Date, nullable=False)
    description = Column(String, nullable=True)
    cost = Column(Float, nullable=False)

    equipment = relationship("Equipment")
    scenario = relationship("Scenario", back_populates="maintenance_items")

    def __repr__(self) -> str:
        return (
            f"<Maintenance id={self.id} equipment_id={self.equipment_id} "
            f"scenario_id={self.scenario_id} date={self.maintenance_date} cost={self.cost}>"
        )

108 models/metadata.py Normal file
@@ -0,0 +1,108 @@
from __future__ import annotations

from dataclasses import dataclass
from .enums import ResourceType, CostBucket, StochasticVariable


@dataclass(frozen=True)
class ResourceDescriptor:
    """Describes canonical metadata for a resource type."""

    unit: str
    description: str


RESOURCE_METADATA: dict[ResourceType, ResourceDescriptor] = {
    ResourceType.DIESEL: ResourceDescriptor(unit="L", description="Diesel fuel consumption"),
    ResourceType.ELECTRICITY: ResourceDescriptor(unit="kWh", description="Electrical power usage"),
    ResourceType.WATER: ResourceDescriptor(unit="m3", description="Process and dust suppression water"),
    ResourceType.EXPLOSIVES: ResourceDescriptor(unit="kg", description="Blasting agent consumption"),
    ResourceType.REAGENTS: ResourceDescriptor(unit="kg", description="Processing reagents"),
    ResourceType.LABOR: ResourceDescriptor(unit="hours", description="Direct labor hours"),
    ResourceType.EQUIPMENT_HOURS: ResourceDescriptor(unit="hours", description="Mobile equipment operating hours"),
    ResourceType.TAILINGS_CAPACITY: ResourceDescriptor(unit="m3", description="Tailings storage usage"),
}


@dataclass(frozen=True)
class CostBucketDescriptor:
    """Describes reporting label and guidance for a cost bucket."""

    label: str
    description: str


COST_BUCKET_METADATA: dict[CostBucket, CostBucketDescriptor] = {
    CostBucket.CAPITAL_INITIAL: CostBucketDescriptor(
        label="Initial Capital",
        description="Pre-production capital required to construct the mine",
    ),
    CostBucket.CAPITAL_SUSTAINING: CostBucketDescriptor(
        label="Sustaining Capital",
        description="Ongoing capital investments to maintain operations",
    ),
    CostBucket.OPERATING_FIXED: CostBucketDescriptor(
        label="Fixed Operating",
        description="Fixed operating costs independent of production rate",
    ),
    CostBucket.OPERATING_VARIABLE: CostBucketDescriptor(
        label="Variable Operating",
        description="Costs that scale with throughput or production",
    ),
    CostBucket.MAINTENANCE: CostBucketDescriptor(
        label="Maintenance",
        description="Maintenance and repair expenditures",
    ),
    CostBucket.RECLAMATION: CostBucketDescriptor(
        label="Reclamation",
        description="Mine closure and reclamation liabilities",
    ),
    CostBucket.ROYALTIES: CostBucketDescriptor(
        label="Royalties",
        description="Royalty and streaming obligations",
    ),
    CostBucket.GENERAL_ADMIN: CostBucketDescriptor(
        label="G&A",
        description="Corporate and site general and administrative costs",
    ),
}


@dataclass(frozen=True)
class StochasticVariableDescriptor:
    """Metadata describing how a stochastic variable is typically modelled."""

    unit: str
    description: str


STOCHASTIC_VARIABLE_METADATA: dict[StochasticVariable, StochasticVariableDescriptor] = {
    StochasticVariable.ORE_GRADE: StochasticVariableDescriptor(
        unit="g/t",
        description="Head grade variability across the ore body",
    ),
    StochasticVariable.RECOVERY_RATE: StochasticVariableDescriptor(
        unit="%",
        description="Metallurgical recovery uncertainty",
    ),
    StochasticVariable.METAL_PRICE: StochasticVariableDescriptor(
        unit="$/unit",
        description="Commodity price fluctuations",
    ),
    StochasticVariable.OPERATING_COST: StochasticVariableDescriptor(
        unit="$/t",
        description="Operating cost per tonne volatility",
    ),
    StochasticVariable.CAPITAL_COST: StochasticVariableDescriptor(
        unit="$",
        description="Capital cost overrun/underrun potential",
    ),
    StochasticVariable.DISCOUNT_RATE: StochasticVariableDescriptor(
        unit="%",
        description="Discount rate sensitivity",
    ),
    StochasticVariable.THROUGHPUT: StochasticVariableDescriptor(
        unit="t/d",
        description="Plant throughput variability",
    ),
}
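
These lookup tables are plain dictionaries keyed by enum member, so reporting code can resolve units and labels without touching the database:

    descriptor = RESOURCE_METADATA[ResourceType.DIESEL]
    print(f"{descriptor.description} ({descriptor.unit})")   # Diesel fuel consumption (L)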

125 models/navigation.py Normal file
@@ -0,0 +1,125 @@
from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from sqlalchemy import (
    Boolean,
    CheckConstraint,
    DateTime,
    ForeignKey,
    Index,
    Integer,
    String,
    UniqueConstraint,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func
from sqlalchemy.ext.mutable import MutableList
from sqlalchemy import JSON

from config.database import Base


class NavigationGroup(Base):
    __tablename__ = "navigation_groups"
    __table_args__ = (
        UniqueConstraint("slug", name="uq_navigation_groups_slug"),
        Index("ix_navigation_groups_sort_order", "sort_order"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    slug: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(128), nullable=False)
    sort_order: Mapped[int] = mapped_column(
        Integer, nullable=False, default=100)
    icon: Mapped[Optional[str]] = mapped_column(String(64))
    tooltip: Mapped[Optional[str]] = mapped_column(String(255))
    is_enabled: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    links: Mapped[List["NavigationLink"]] = relationship(
        "NavigationLink",
        back_populates="group",
        cascade="all, delete-orphan",
        order_by="NavigationLink.sort_order",
    )

    def __repr__(self) -> str:  # pragma: no cover
        return f"NavigationGroup(id={self.id!r}, slug={self.slug!r})"


class NavigationLink(Base):
    __tablename__ = "navigation_links"
    __table_args__ = (
        UniqueConstraint("group_id", "slug",
                         name="uq_navigation_links_group_slug"),
        Index("ix_navigation_links_group_sort", "group_id", "sort_order"),
        Index("ix_navigation_links_parent_sort",
              "parent_link_id", "sort_order"),
        CheckConstraint(
            "(route_name IS NOT NULL) OR (href_override IS NOT NULL)",
            name="ck_navigation_links_route_or_href",
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    group_id: Mapped[int] = mapped_column(
        ForeignKey("navigation_groups.id", ondelete="CASCADE"), nullable=False
    )
    parent_link_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("navigation_links.id", ondelete="CASCADE")
    )
    slug: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(128), nullable=False)
    route_name: Mapped[Optional[str]] = mapped_column(String(128))
    href_override: Mapped[Optional[str]] = mapped_column(String(512))
    match_prefix: Mapped[Optional[str]] = mapped_column(String(512))
    sort_order: Mapped[int] = mapped_column(
        Integer, nullable=False, default=100)
    icon: Mapped[Optional[str]] = mapped_column(String(64))
    tooltip: Mapped[Optional[str]] = mapped_column(String(255))
    required_roles: Mapped[list[str]] = mapped_column(
        MutableList.as_mutable(JSON), nullable=False, default=list
    )
    is_enabled: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    is_external: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    group: Mapped[NavigationGroup] = relationship(
        NavigationGroup,
        back_populates="links",
    )
    parent: Mapped[Optional["NavigationLink"]] = relationship(
        "NavigationLink",
        remote_side="NavigationLink.id",
        back_populates="children",
    )
    children: Mapped[List["NavigationLink"]] = relationship(
        "NavigationLink",
        back_populates="parent",
        cascade="all, delete-orphan",
        order_by="NavigationLink.sort_order",
    )

    def is_visible_for_roles(self, roles: list[str]) -> bool:
        if not self.required_roles:
            return True
        role_set = set(roles)
        return any(role in role_set for role in self.required_roles)

    def __repr__(self) -> str:  # pragma: no cover
        return f"NavigationLink(id={self.id!r}, slug={self.slug!r})"
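
`is_visible_for_roles` treats an empty `required_roles` list as public and otherwise requires any single overlapping role. For example:

    link = NavigationLink(slug="admin-settings", label="Settings",
                          route_name="admin.settings",
                          required_roles=["admin", "ops"])
    link.is_visible_for_roles(["viewer"])         # False: no overlap
    link.is_visible_for_roles(["ops", "viewer"])  # True: one matching role is enough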

@@ -1,57 +0,0 @@
from sqlalchemy import event, text
from sqlalchemy import Column, Integer, Float, String, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class Opex(Base):
    __tablename__ = "opex"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    currency_id = Column(Integer, ForeignKey("currency.id"), nullable=False)

    scenario = relationship("Scenario", back_populates="opex_items")
    currency = relationship("Currency", back_populates="opex_items")

    def __repr__(self):
        return (
            f"<Opex id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} currency_id={self.currency_id}>"
        )

    @property
    def currency_code(self) -> str:
        return self.currency.code if self.currency else None

    @currency_code.setter
    def currency_code(self, value: str) -> None:
        setattr(self, "_currency_code_pending",
                (value or "USD").strip().upper())


def _resolve_currency_opex(mapper, connection, target):
    if getattr(target, "currency_id", None):
        return
    code = getattr(target, "_currency_code_pending", None) or "USD"
    row = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
        "code": code}).fetchone()
    if row:
        cid = row[0]
    else:
        res = connection.execute(
            text("INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"),
            {"code": code, "name": code, "symbol": None, "active": True},
        )
        try:
            cid = res.lastrowid
        except Exception:
            cid = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
                "code": code}).scalar()
    target.currency_id = cid


event.listen(Opex, "before_insert", _resolve_currency_opex)
event.listen(Opex, "before_update", _resolve_currency_opex)

123 models/opex_snapshot.py Normal file
@@ -0,0 +1,123 @@
from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import JSON, Boolean, DateTime, ForeignKey, Integer, Numeric, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

if TYPE_CHECKING:  # pragma: no cover
    from .project import Project
    from .scenario import Scenario
    from .user import User


class ProjectOpexSnapshot(Base):
    """Snapshot of recurring opex metrics at the project level."""

    __tablename__ = "project_opex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    overall_annual: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    escalated_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    annual_average: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    evaluation_horizon_years: Mapped[int | None] = mapped_column(
        Integer, nullable=True)
    escalation_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    apply_escalation: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="opex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectOpexSnapshot(id={id!r}, project_id={project_id!r}, overall_annual={overall_annual!r})".format(
                id=self.id,
                project_id=self.project_id,
                overall_annual=self.overall_annual,
            )
        )


class ScenarioOpexSnapshot(Base):
    """Snapshot of opex metrics for an individual scenario."""

    __tablename__ = "scenario_opex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    overall_annual: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    escalated_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    annual_average: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    evaluation_horizon_years: Mapped[int | None] = mapped_column(
        Integer, nullable=True)
    escalation_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    apply_escalation: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="opex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ScenarioOpexSnapshot(id={id!r}, scenario_id={scenario_id!r}, overall_annual={overall_annual!r})".format(
                id=self.id,
                scenario_id=self.scenario_id,
                overall_annual=self.overall_annual,
            )
        )

@@ -1,26 +0,0 @@
from typing import Any, Dict, Optional

from sqlalchemy import ForeignKey, JSON
from sqlalchemy.orm import Mapped, mapped_column, relationship
from config.database import Base


class Parameter(Base):
    __tablename__ = "parameter"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenario.id"), nullable=False)
    name: Mapped[str] = mapped_column(nullable=False)
    value: Mapped[float] = mapped_column(nullable=False)
    distribution_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("distribution.id"), nullable=True)
    distribution_type: Mapped[Optional[str]] = mapped_column(nullable=True)
    distribution_parameters: Mapped[Optional[Dict[str, Any]]] = mapped_column(
        JSON, nullable=True)

    scenario = relationship("Scenario", back_populates="parameters")
    distribution = relationship("Distribution")

    def __repr__(self):
        return f"<Parameter id={self.id} name={self.name} value={self.value}>"

24 models/performance_metric.py Normal file
@@ -0,0 +1,24 @@
from __future__ import annotations

from datetime import datetime

from sqlalchemy import Column, DateTime, Float, Integer, String

from config.database import Base


class PerformanceMetric(Base):
    __tablename__ = "performance_metrics"

    id = Column(Integer, primary_key=True, index=True)
    timestamp = Column(DateTime, default=datetime.utcnow, index=True)
    metric_name = Column(String, index=True)
    value = Column(Float)
    labels = Column(String)  # JSON string of labels
    endpoint = Column(String, index=True, nullable=True)
    method = Column(String, nullable=True)
    status_code = Column(Integer, nullable=True)
    duration_seconds = Column(Float, nullable=True)

    def __repr__(self) -> str:
        return f"<PerformanceMetric(id={self.id}, name={self.metric_name}, value={self.value})>"

176 models/pricing_settings.py Normal file
@@ -0,0 +1,176 @@
"""Database models for persisted pricing configuration settings."""

from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import (
    JSON,
    DateTime,
    ForeignKey,
    Integer,
    Numeric,
    String,
    Text,
    UniqueConstraint,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
from sqlalchemy.sql import func

from config.database import Base
from services.currency import normalise_currency

if TYPE_CHECKING:  # pragma: no cover
    from .project import Project


class PricingSettings(Base):
    """Persisted pricing defaults applied to scenario evaluations."""

    __tablename__ = "pricing_settings"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String(128), nullable=False, unique=True)
    slug: Mapped[str] = mapped_column(String(64), nullable=False, unique=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    default_currency: Mapped[str | None] = mapped_column(
        String(3), nullable=True)
    default_payable_pct: Mapped[float] = mapped_column(
        Numeric(5, 2), nullable=False, default=100.0
    )
    moisture_threshold_pct: Mapped[float] = mapped_column(
        Numeric(5, 2), nullable=False, default=8.0
    )
    moisture_penalty_per_pct: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0
    )
    metadata_payload: Mapped[dict | None] = mapped_column(
        "metadata", JSON, nullable=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    metal_overrides: Mapped[list["PricingMetalSettings"]] = relationship(
        "PricingMetalSettings",
        back_populates="pricing_settings",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    impurity_overrides: Mapped[list["PricingImpuritySettings"]] = relationship(
        "PricingImpuritySettings",
        back_populates="pricing_settings",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    projects: Mapped[list["Project"]] = relationship(
        "Project",
        back_populates="pricing_settings",
        cascade="all",
    )

    @validates("slug")
    def _normalise_slug(self, key: str, value: str) -> str:
        return value.strip().lower()

    @validates("default_currency")
    def _validate_currency(self, key: str, value: str | None) -> str | None:
        return normalise_currency(value)

    def __repr__(self) -> str:  # pragma: no cover
        return f"PricingSettings(id={self.id!r}, slug={self.slug!r})"


class PricingMetalSettings(Base):
    """Contract-specific overrides for a particular metal."""

    __tablename__ = "pricing_metal_settings"
    __table_args__ = (
        UniqueConstraint(
            "pricing_settings_id", "metal_code", name="uq_pricing_metal_settings_code"
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    pricing_settings_id: Mapped[int] = mapped_column(
        ForeignKey("pricing_settings.id", ondelete="CASCADE"), nullable=False, index=True
    )
    metal_code: Mapped[str] = mapped_column(String(32), nullable=False)
    payable_pct: Mapped[float | None] = mapped_column(
        Numeric(5, 2), nullable=True)
    moisture_threshold_pct: Mapped[float | None] = mapped_column(
        Numeric(5, 2), nullable=True)
    moisture_penalty_per_pct: Mapped[float | None] = mapped_column(
        Numeric(14, 4), nullable=True
    )
    data: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    pricing_settings: Mapped["PricingSettings"] = relationship(
        "PricingSettings", back_populates="metal_overrides"
    )

    @validates("metal_code")
    def _normalise_metal_code(self, key: str, value: str) -> str:
        return value.strip().lower()

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "PricingMetalSettings("  # noqa: ISC001
            f"id={self.id!r}, pricing_settings_id={self.pricing_settings_id!r}, "
            f"metal_code={self.metal_code!r})"
        )


class PricingImpuritySettings(Base):
    """Impurity penalty thresholds associated with pricing settings."""

    __tablename__ = "pricing_impurity_settings"
    __table_args__ = (
        UniqueConstraint(
            "pricing_settings_id",
            "impurity_code",
            name="uq_pricing_impurity_settings_code",
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    pricing_settings_id: Mapped[int] = mapped_column(
        ForeignKey("pricing_settings.id", ondelete="CASCADE"), nullable=False, index=True
    )
    impurity_code: Mapped[str] = mapped_column(String(32), nullable=False)
    threshold_ppm: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0)
    penalty_per_ppm: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    pricing_settings: Mapped["PricingSettings"] = relationship(
        "PricingSettings", back_populates="impurity_overrides"
    )

    @validates("impurity_code")
    def _normalise_impurity_code(self, key: str, value: str) -> str:
        return value.strip().upper()

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "PricingImpuritySettings("  # noqa: ISC001
            f"id={self.id!r}, pricing_settings_id={self.pricing_settings_id!r}, "
            f"impurity_code={self.impurity_code!r})"
        )
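
Note that the attribute is named `metadata_payload` because `metadata` is reserved on SQLAlchemy declarative classes; the mapped column itself is still called "metadata". A short sketch of the validators in action:

    settings = PricingSettings(name="Default", slug="  Default-Contract  ")
    print(settings.slug)                            # "default-contract" (stripped, lowercased)
    settings.metadata_payload = {"basis": "spot"}   # persisted in the "metadata" column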

@@ -1,23 +0,0 @@
from sqlalchemy import Column, Integer, Float, String, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class ProductionOutput(Base):
    __tablename__ = "production_output"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    unit_name = Column(String(64), nullable=True)
    unit_symbol = Column(String(16), nullable=True)

    scenario = relationship(
        "Scenario", back_populates="production_output_items")

    def __repr__(self):
        return (
            f"<ProductionOutput id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} unit={self.unit_symbol or self.unit_name}>"
        )

133 models/profitability_snapshot.py Normal file
@@ -0,0 +1,133 @@
from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import JSON, DateTime, ForeignKey, Integer, Numeric, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

if TYPE_CHECKING:  # pragma: no cover
    from .project import Project
    from .scenario import Scenario
    from .user import User


class ProjectProfitability(Base):
    """Snapshot of aggregated profitability metrics at the project level."""

    __tablename__ = "project_profitability_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    npv: Mapped[float | None] = mapped_column(Numeric(18, 2), nullable=True)
    irr_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    payback_period_years: Mapped[float | None] = mapped_column(
        Numeric(12, 4), nullable=True
    )
    margin_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    revenue_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    opex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    sustaining_capex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    net_cash_flow_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="profitability_snapshots")
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectProfitability(id={id!r}, project_id={project_id!r}, npv={npv!r})".format(
                id=self.id, project_id=self.project_id, npv=self.npv
            )
        )


class ScenarioProfitability(Base):
    """Snapshot of profitability metrics for an individual scenario."""

    __tablename__ = "scenario_profitability_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    npv: Mapped[float | None] = mapped_column(Numeric(18, 2), nullable=True)
    irr_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    payback_period_years: Mapped[float | None] = mapped_column(
        Numeric(12, 4), nullable=True
    )
    margin_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    revenue_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    opex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    sustaining_capex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    net_cash_flow_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="profitability_snapshots")
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ScenarioProfitability(id={id!r}, scenario_id={scenario_id!r}, npv={npv!r})".format(
                id=self.id, scenario_id=self.scenario_id, npv=self.npv
            )
        )

104 models/project.py Normal file
@@ -0,0 +1,104 @@
from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING, List

from .enums import MiningOperationType, sql_enum
from .profitability_snapshot import ProjectProfitability
from .capex_snapshot import ProjectCapexSnapshot
from .opex_snapshot import ProjectOpexSnapshot

from sqlalchemy import DateTime, ForeignKey, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

if TYPE_CHECKING:  # pragma: no cover
    from .scenario import Scenario
    from .pricing_settings import PricingSettings


class Project(Base):
    """Top-level mining project grouping multiple scenarios."""

    __tablename__ = "projects"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    location: Mapped[str | None] = mapped_column(String(255), nullable=True)
    operation_type: Mapped[MiningOperationType] = mapped_column(
        sql_enum(MiningOperationType, name="miningoperationtype"),
        nullable=False,
        default=MiningOperationType.OTHER,
    )
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    pricing_settings_id: Mapped[int | None] = mapped_column(
        ForeignKey("pricing_settings.id", ondelete="SET NULL"),
        nullable=True,
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenarios: Mapped[List["Scenario"]] = relationship(
        "Scenario",
        back_populates="project",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    pricing_settings: Mapped["PricingSettings | None"] = relationship(
        "PricingSettings",
        back_populates="projects",
    )
    profitability_snapshots: Mapped[List["ProjectProfitability"]] = relationship(
        "ProjectProfitability",
        back_populates="project",
        cascade="all, delete-orphan",
        order_by=lambda: ProjectProfitability.calculated_at.desc(),
        passive_deletes=True,
    )
    capex_snapshots: Mapped[List["ProjectCapexSnapshot"]] = relationship(
        "ProjectCapexSnapshot",
        back_populates="project",
        cascade="all, delete-orphan",
        order_by=lambda: ProjectCapexSnapshot.calculated_at.desc(),
        passive_deletes=True,
    )
    opex_snapshots: Mapped[List["ProjectOpexSnapshot"]] = relationship(
        "ProjectOpexSnapshot",
        back_populates="project",
        cascade="all, delete-orphan",
        order_by=lambda: ProjectOpexSnapshot.calculated_at.desc(),
        passive_deletes=True,
    )

    @property
    def latest_profitability(self) -> "ProjectProfitability | None":
        """Return the most recent profitability snapshot, if any."""

        if not self.profitability_snapshots:
            return None
        return self.profitability_snapshots[0]

    @property
    def latest_capex(self) -> "ProjectCapexSnapshot | None":
        """Return the most recent capex snapshot, if any."""

        if not self.capex_snapshots:
            return None
        return self.capex_snapshots[0]

    @property
    def latest_opex(self) -> "ProjectOpexSnapshot | None":
        """Return the most recent opex snapshot, if any."""

        if not self.opex_snapshots:
            return None
        return self.opex_snapshots[0]

    def __repr__(self) -> str:  # pragma: no cover - helpful for debugging
        return f"Project(id={self.id!r}, name={self.name!r})"
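
Because each snapshot relationship is ordered by `calculated_at` descending, the `latest_*` properties can simply return element 0. A usage sketch, assuming a configured `session` (hypothetical):

    project = session.get(Project, 1)
    snap = project.latest_capex      # newest ProjectCapexSnapshot, or None
    if snap is not None:
        print(snap.total_capex, snap.currency_code)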
@@ -1,39 +1,133 @@
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func
|
||||
from sqlalchemy.orm import relationship
|
||||
from models.simulation_result import SimulationResult
|
||||
from models.capex import Capex
|
||||
from models.opex import Opex
|
||||
from models.consumption import Consumption
|
||||
from models.production_output import ProductionOutput
|
||||
from models.equipment import Equipment
|
||||
from models.maintenance import Maintenance
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
from sqlalchemy import (
|
||||
Date,
|
||||
DateTime,
|
||||
ForeignKey,
|
||||
Integer,
|
||||
Numeric,
|
||||
String,
|
||||
Text,
|
||||
UniqueConstraint,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from config.database import Base
|
||||
from services.currency import normalise_currency
|
||||
from .enums import ResourceType, ScenarioStatus, sql_enum
|
||||
from .profitability_snapshot import ScenarioProfitability
|
||||
from .capex_snapshot import ScenarioCapexSnapshot
|
||||
from .opex_snapshot import ScenarioOpexSnapshot
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from .financial_input import FinancialInput
|
||||
from .project import Project
|
||||
from .simulation_parameter import SimulationParameter
|
||||
|
||||
|
||||
class Scenario(Base):
|
||||
__tablename__ = "scenario"
|
||||
"""A specific configuration of assumptions for a project."""
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
name = Column(String, unique=True, nullable=False)
|
||||
    description = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
    parameters = relationship("Parameter", back_populates="scenario")
    simulation_results = relationship(
        SimulationResult, back_populates="scenario")
    capex_items = relationship(
        Capex, back_populates="scenario")
    opex_items = relationship(
        Opex, back_populates="scenario")
    consumption_items = relationship(
        Consumption, back_populates="scenario")
    production_output_items = relationship(
        ProductionOutput, back_populates="scenario")
    equipment_items = relationship(
        Equipment, back_populates="scenario")
    maintenance_items = relationship(
        Maintenance, back_populates="scenario")
    __tablename__ = "scenarios"
    __table_args__ = (
        UniqueConstraint("project_id", "name",
                         name="uq_scenarios_project_name"),
    )

    # relationships can be defined later
    def __repr__(self):
        return f"<Scenario id={self.id} name={self.name}>"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    status: Mapped[ScenarioStatus] = mapped_column(
        sql_enum(ScenarioStatus, name="scenariostatus"),
        nullable=False,
        default=ScenarioStatus.DRAFT,
    )
    start_date: Mapped[date | None] = mapped_column(Date, nullable=True)
    end_date: Mapped[date | None] = mapped_column(Date, nullable=True)
    discount_rate: Mapped[float | None] = mapped_column(
        Numeric(5, 2), nullable=True)
    currency: Mapped[str | None] = mapped_column(String(3), nullable=True)
    primary_resource: Mapped[ResourceType | None] = mapped_column(
        sql_enum(ResourceType, name="resourcetype"), nullable=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped["Project"] = relationship(
        "Project", back_populates="scenarios")
    financial_inputs: Mapped[List["FinancialInput"]] = relationship(
        "FinancialInput",
        back_populates="scenario",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    simulation_parameters: Mapped[List["SimulationParameter"]] = relationship(
        "SimulationParameter",
        back_populates="scenario",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    profitability_snapshots: Mapped[List["ScenarioProfitability"]] = relationship(
        "ScenarioProfitability",
        back_populates="scenario",
        cascade="all, delete-orphan",
        order_by=lambda: ScenarioProfitability.calculated_at.desc(),
        passive_deletes=True,
    )
    capex_snapshots: Mapped[List["ScenarioCapexSnapshot"]] = relationship(
        "ScenarioCapexSnapshot",
        back_populates="scenario",
        cascade="all, delete-orphan",
        order_by=lambda: ScenarioCapexSnapshot.calculated_at.desc(),
        passive_deletes=True,
    )
    opex_snapshots: Mapped[List["ScenarioOpexSnapshot"]] = relationship(
        "ScenarioOpexSnapshot",
        back_populates="scenario",
        cascade="all, delete-orphan",
        order_by=lambda: ScenarioOpexSnapshot.calculated_at.desc(),
        passive_deletes=True,
    )

    @validates("currency")
    def _normalise_currency(self, key: str, value: str | None) -> str | None:
        # Normalise to uppercase ISO-4217; raises when the code is malformed.
        return normalise_currency(value)

    def __repr__(self) -> str:  # pragma: no cover
        return f"Scenario(id={self.id!r}, name={self.name!r}, project_id={self.project_id!r})"

    @property
    def latest_profitability(self) -> "ScenarioProfitability | None":
        """Return the most recent profitability snapshot for this scenario."""

        if not self.profitability_snapshots:
            return None
        return self.profitability_snapshots[0]

    @property
    def latest_capex(self) -> "ScenarioCapexSnapshot | None":
        """Return the most recent capex snapshot for this scenario."""

        if not self.capex_snapshots:
            return None
        return self.capex_snapshots[0]

    @property
    def latest_opex(self) -> "ScenarioOpexSnapshot | None":
        """Return the most recent opex snapshot for this scenario."""

        if not self.opex_snapshots:
            return None
        return self.opex_snapshots[0]
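
Each snapshot relationship above is ordered by `calculated_at` descending, so the `latest_*` properties only ever read index 0. A minimal usage sketch, assuming an existing SQLAlchemy `session` and a persisted scenario (the id is illustrative, not part of the diff):

    # Hedged sketch: `session` and scenario id 1 are assumptions.
    scenario = session.get(Scenario, 1)
    latest = scenario.latest_profitability
    if latest is not None:
        print(latest.calculated_at)  # newest snapshot thanks to the descending order_by
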
models/simulation_parameter.py (new file, 69 lines)
@@ -0,0 +1,69 @@
from __future__ import annotations

from datetime import datetime
from typing import TYPE_CHECKING

from .enums import DistributionType, ResourceType, StochasticVariable, sql_enum

from sqlalchemy import (
    JSON,
    DateTime,
    ForeignKey,
    Integer,
    Numeric,
    String,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

if TYPE_CHECKING:  # pragma: no cover
    from .scenario import Scenario


class SimulationParameter(Base):
    """Probability distribution settings for scenario simulations."""

    __tablename__ = "simulation_parameters"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    distribution: Mapped[DistributionType] = mapped_column(
        sql_enum(DistributionType, name="distributiontype"), nullable=False
    )
    variable: Mapped[StochasticVariable | None] = mapped_column(
        sql_enum(StochasticVariable, name="stochasticvariable"), nullable=True
    )
    resource_type: Mapped[ResourceType | None] = mapped_column(
        sql_enum(ResourceType, name="resourcetype"), nullable=True
    )
    mean_value: Mapped[float | None] = mapped_column(
        Numeric(18, 4), nullable=True)
    standard_deviation: Mapped[float | None] = mapped_column(
        Numeric(18, 4), nullable=True)
    minimum_value: Mapped[float | None] = mapped_column(
        Numeric(18, 4), nullable=True)
    maximum_value: Mapped[float | None] = mapped_column(
        Numeric(18, 4), nullable=True)
    unit: Mapped[str | None] = mapped_column(String(32), nullable=True)
    configuration: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped["Scenario"] = relationship(
        "Scenario", back_populates="simulation_parameters"
    )

    def __repr__(self) -> str:  # pragma: no cover
        return (
            f"SimulationParameter(id={self.id!r}, scenario_id={self.scenario_id!r}, "
            f"name={self.name!r})"
        )
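
Rows in `simulation_parameters` only store distribution settings; nothing here runs a simulation. A hedged sketch of creating one, assuming `DistributionType` exposes a `NORMAL` member (not shown in this diff) and a `session` is at hand:

    param = SimulationParameter(
        scenario_id=1,                         # illustrative id
        name="ore_grade",
        distribution=DistributionType.NORMAL,  # assumed enum member
        mean_value=2.5,
        standard_deviation=0.4,
        unit="%",
    )
    session.add(param)
    session.commit()
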
@@ -1,14 +0,0 @@
from sqlalchemy import Column, Integer, Float, ForeignKey
from sqlalchemy.orm import relationship
from config.database import Base


class SimulationResult(Base):
    __tablename__ = "simulation_result"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    iteration = Column(Integer, nullable=False)
    result = Column(Float, nullable=False)

    scenario = relationship("Scenario", back_populates="simulation_results")
models/user.py (new file, 176 lines)
@@ -0,0 +1,176 @@
from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from passlib.context import CryptContext

try:  # pragma: no cover - defensive compatibility shim
    import importlib.metadata as importlib_metadata

    import argon2  # type: ignore

    setattr(argon2, "__version__", importlib_metadata.version("argon2-cffi"))
except Exception:
    pass
from sqlalchemy import (
    Boolean,
    DateTime,
    ForeignKey,
    Integer,
    String,
    Text,
    UniqueConstraint,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from config.database import Base

# Configure password hashing strategy. Argon2 provides strong resistance against
# GPU-based cracking attempts, aligning with the security plan.
password_context = CryptContext(schemes=["argon2"], deprecated="auto")


class User(Base):
    """Authenticated platform user with optional elevated privileges."""

    __tablename__ = "users"
    __table_args__ = (
        UniqueConstraint("email", name="uq_users_email"),
        UniqueConstraint("username", name="uq_users_username"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    email: Mapped[str] = mapped_column(String(255), nullable=False)
    username: Mapped[str] = mapped_column(String(128), nullable=False)
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    is_active: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    is_superuser: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=False)
    last_login_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), nullable=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    role_assignments: Mapped[List["UserRole"]] = relationship(
        "UserRole",
        back_populates="user",
        cascade="all, delete-orphan",
        foreign_keys="UserRole.user_id",
    )
    roles: Mapped[List["Role"]] = relationship(
        "Role",
        secondary="user_roles",
        primaryjoin="User.id == UserRole.user_id",
        secondaryjoin="Role.id == UserRole.role_id",
        viewonly=True,
        back_populates="users",
    )

    def set_password(self, raw_password: str) -> None:
        """Hash and store a password for the user."""

        self.password_hash = self.hash_password(raw_password)

    @staticmethod
    def hash_password(raw_password: str) -> str:
        """Return the Argon2 hash for a clear-text password."""

        return password_context.hash(raw_password)

    def verify_password(self, candidate_password: str) -> bool:
        """Validate a password against the stored hash."""

        if not self.password_hash:
            return False
        return password_context.verify(candidate_password, self.password_hash)

    def __repr__(self) -> str:  # pragma: no cover - helpful for debugging
        return f"User(id={self.id!r}, email={self.email!r})"
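
The model never stores the raw password: `set_password` hashes via the module-level `password_context`, and `verify_password` checks a candidate against the stored Argon2 hash. A quick round-trip sketch (in-memory only, nothing persisted; values are illustrative):

    user = User(email="demo@example.com", username="demo")  # illustrative values
    user.set_password("s3cret!")
    assert user.verify_password("s3cret!")
    assert not user.verify_password("wrong")
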

class Role(Base):
    """Role encapsulating a set of permissions."""

    __tablename__ = "roles"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String(64), nullable=False, unique=True)
    display_name: Mapped[str] = mapped_column(String(128), nullable=False)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    assignments: Mapped[List["UserRole"]] = relationship(
        "UserRole",
        back_populates="role",
        cascade="all, delete-orphan",
        foreign_keys="UserRole.role_id",
    )
    users: Mapped[List["User"]] = relationship(
        "User",
        secondary="user_roles",
        primaryjoin="Role.id == UserRole.role_id",
        secondaryjoin="User.id == UserRole.user_id",
        viewonly=True,
        back_populates="roles",
    )

    def __repr__(self) -> str:  # pragma: no cover - helpful for debugging
        return f"Role(id={self.id!r}, name={self.name!r})"


class UserRole(Base):
    """Association between users and roles with assignment metadata."""

    __tablename__ = "user_roles"
    __table_args__ = (
        UniqueConstraint("user_id", "role_id", name="uq_user_roles_user_role"),
    )

    user_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("users.id", ondelete="CASCADE"),
        primary_key=True,
    )
    role_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("roles.id", ondelete="CASCADE"),
        primary_key=True,
    )
    granted_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    granted_by: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey("users.id", ondelete="SET NULL"),
        nullable=True,
    )

    user: Mapped["User"] = relationship(
        "User",
        foreign_keys=[user_id],
        back_populates="role_assignments",
    )
    role: Mapped["Role"] = relationship(
        "Role",
        foreign_keys=[role_id],
        back_populates="assignments",
    )
    granted_by_user: Mapped[Optional["User"]] = relationship(
        "User",
        foreign_keys=[granted_by],
    )

    def __repr__(self) -> str:  # pragma: no cover - debugging helper
        return f"UserRole(user_id={self.user_id!r}, role_id={self.role_id!r})"
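
Granting a role is just inserting a `UserRole` row; `granted_by` records who performed the grant and degrades to NULL if that user is later deleted, thanks to `ondelete="SET NULL"`. A hedged sketch, assuming persisted `user`, `role`, and `admin` rows and an open `session`:

    session.add(UserRole(user_id=user.id, role_id=role.id, granted_by=admin.id))
    session.commit()
    # the viewonly `roles` relationship reflects the new assignment after a refresh
    session.refresh(user)
    print(user.roles)
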
monitoring/__init__.py (new file, 117 lines)
@@ -0,0 +1,117 @@
from __future__ import annotations

from datetime import datetime, timedelta
from typing import Optional

from fastapi import APIRouter, Depends, Query, Response
from prometheus_client import CONTENT_TYPE_LATEST, generate_latest
from sqlalchemy.orm import Session

from config.database import get_db
from services.metrics import MetricsService


router = APIRouter(prefix="/metrics", tags=["monitoring"])


@router.get("", summary="Prometheus metrics endpoint", include_in_schema=False)
async def metrics_endpoint() -> Response:
    payload = generate_latest()
    return Response(content=payload, media_type=CONTENT_TYPE_LATEST)


@router.get("/performance", summary="Get performance metrics")
async def get_performance_metrics(
    metric_name: Optional[str] = Query(
        None, description="Filter by metric name"),
    hours: int = Query(24, description="Hours back to look"),
    db: Session = Depends(get_db),
) -> dict:
    """Get aggregated performance metrics."""
    service = MetricsService(db)
    start_time = datetime.utcnow() - timedelta(hours=hours)

    if metric_name:
        metrics = service.get_metrics(
            metric_name=metric_name, start_time=start_time)
        aggregated = service.get_aggregated_metrics(
            metric_name, start_time=start_time)
        return {
            "metric_name": metric_name,
            "period_hours": hours,
            "aggregated": aggregated,
            "recent_samples": [
                {
                    "timestamp": m.timestamp.isoformat(),
                    "value": m.value,
                    "labels": m.labels,
                    "endpoint": m.endpoint,
                    "method": m.method,
                    "status_code": m.status_code,
                    "duration_seconds": m.duration_seconds,
                }
                for m in metrics[:50]  # Last 50 samples
            ],
        }

    # Return summary for all metrics
    all_metrics = service.get_metrics(start_time=start_time, limit=1000)
    metric_types = {}
    for m in all_metrics:
        if m.metric_name not in metric_types:
            metric_types[m.metric_name] = []
        metric_types[m.metric_name].append(m.value)

    summary = {}
    for name, values in metric_types.items():
        summary[name] = {
            "count": len(values),
            "avg": sum(values) / len(values) if values else 0,
            "min": min(values) if values else 0,
            "max": max(values) if values else 0,
        }

    return {
        "period_hours": hours,
        "summary": summary,
    }


@router.get("/health", summary="Detailed health check with metrics")
async def detailed_health(db: Session = Depends(get_db)) -> dict:
    """Get detailed health status with recent metrics."""
    service = MetricsService(db)
    last_hour = datetime.utcnow() - timedelta(hours=1)

    # Get request metrics from last hour
    request_metrics = service.get_metrics(
        metric_name="http_request", start_time=last_hour
    )

    if request_metrics:
        durations = []
        error_count = 0
        for m in request_metrics:
            if m.duration_seconds is not None:
                durations.append(m.duration_seconds)
            if m.status_code is not None:
                if m.status_code >= 400:
                    error_count += 1
        total_requests = len(request_metrics)

        avg_duration = sum(durations) / len(durations) if durations else 0
        error_rate = error_count / total_requests if total_requests > 0 else 0
    else:
        avg_duration = 0
        error_rate = 0
        total_requests = 0

    return {
        "status": "ok",
        "timestamp": datetime.utcnow().isoformat(),
        "metrics": {
            "requests_last_hour": total_requests,
            "avg_response_time_seconds": avg_duration,
            "error_rate": error_rate,
        },
    }
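
The router exposes both the raw Prometheus scrape (`/metrics`) and the JSON views above. A hedged client-side sketch using httpx against a locally running app (the base URL is an assumption):

    import httpx

    # JSON summary of one metric over the last 6 hours
    resp = httpx.get(
        "http://localhost:8000/metrics/performance",
        params={"metric_name": "http_request", "hours": 6},
    )
    print(resp.json()["aggregated"])
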
monitoring/metrics.py (new file, 108 lines)
@@ -0,0 +1,108 @@
from __future__ import annotations


from prometheus_client import Counter, Histogram, Gauge

IMPORT_DURATION = Histogram(
    "calminer_import_duration_seconds",
    "Duration of import preview and commit operations",
    labelnames=("dataset", "action", "status"),
)

IMPORT_TOTAL = Counter(
    "calminer_import_total",
    "Count of import operations",
    labelnames=("dataset", "action", "status"),
)

EXPORT_DURATION = Histogram(
    "calminer_export_duration_seconds",
    "Duration of export operations",
    labelnames=("dataset", "status", "format"),
)

EXPORT_TOTAL = Counter(
    "calminer_export_total",
    "Count of export operations",
    labelnames=("dataset", "status", "format"),
)

# General performance metrics
REQUEST_DURATION = Histogram(
    "calminer_request_duration_seconds",
    "Duration of HTTP requests",
    labelnames=("method", "endpoint", "status"),
)

REQUEST_TOTAL = Counter(
    "calminer_request_total",
    "Count of HTTP requests",
    labelnames=("method", "endpoint", "status"),
)

ACTIVE_CONNECTIONS = Gauge(
    "calminer_active_connections",
    "Number of active connections",
)

DB_CONNECTIONS = Gauge(
    "calminer_db_connections",
    "Number of database connections",
)

# Business metrics
PROJECT_OPERATIONS = Counter(
    "calminer_project_operations_total",
    "Count of project operations",
    labelnames=("operation", "status"),
)

SCENARIO_OPERATIONS = Counter(
    "calminer_scenario_operations_total",
    "Count of scenario operations",
    labelnames=("operation", "status"),
)

SIMULATION_RUNS = Counter(
    "calminer_simulation_runs_total",
    "Count of Monte Carlo simulation runs",
    labelnames=("status",),
)

SIMULATION_DURATION = Histogram(
    "calminer_simulation_duration_seconds",
    "Duration of Monte Carlo simulations",
    labelnames=("status",),
)


def observe_import(action: str, dataset: str, status: str, seconds: float) -> None:
    IMPORT_TOTAL.labels(dataset=dataset, action=action, status=status).inc()
    IMPORT_DURATION.labels(dataset=dataset, action=action,
                           status=status).observe(seconds)


def observe_export(dataset: str, status: str, export_format: str, seconds: float) -> None:
    EXPORT_TOTAL.labels(dataset=dataset, status=status,
                        format=export_format).inc()
    EXPORT_DURATION.labels(dataset=dataset, status=status,
                           format=export_format).observe(seconds)


def observe_request(method: str, endpoint: str, status: int, seconds: float) -> None:
    REQUEST_TOTAL.labels(method=method, endpoint=endpoint, status=status).inc()
    REQUEST_DURATION.labels(method=method, endpoint=endpoint,
                            status=status).observe(seconds)


def observe_project_operation(operation: str, status: str = "success") -> None:
    PROJECT_OPERATIONS.labels(operation=operation, status=status).inc()


def observe_scenario_operation(operation: str, status: str = "success") -> None:
    SCENARIO_OPERATIONS.labels(operation=operation, status=status).inc()


def observe_simulation(status: str, duration_seconds: float) -> None:
    SIMULATION_RUNS.labels(status=status).inc()
    SIMULATION_DURATION.labels(status=status).observe(duration_seconds)
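
Callers are expected to time the work themselves and pass the elapsed seconds in. A short sketch of wrapping a simulation run; `run_monte_carlo()` is an illustrative name, not a function from this diff:

    import time
    from monitoring.metrics import observe_simulation

    start = time.perf_counter()
    try:
        run_monte_carlo()  # hypothetical workload
        observe_simulation("success", time.perf_counter() - start)
    except Exception:
        observe_simulation("failure", time.perf_counter() - start)
        raise
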
pyproject.toml (new file, 46 lines)
@@ -0,0 +1,46 @@
[tool.black]
line-length = 80
target-version = ['py310']
include = '\.pyi?$'
exclude = '''
/(
    \.git
  | \.hg
  | \.mypy_cache
  | \.tox
  | \.venv
  | build
  | dist
)/
'''

[tool.pytest.ini_options]
pythonpath = ["."]
testpaths = ["tests"]
addopts = "-ra --strict-config --strict-markers --cov=. --cov-report=term-missing --cov-report=xml --cov-fail-under=80"
markers = [
    "asyncio: marks tests as async (using pytest-asyncio)",
]

[tool.coverage.run]
branch = true
source = ["."]
omit = [
    "tests/*",
    "scripts/*",
    "main.py",
    "routes/reports.py",
    "routes/calculations.py",
    "services/calculations.py",
    "services/importers.py",
    "services/reporting.py",
]

[tool.coverage.report]
skip_empty = true
show_missing = true

[tool.bandit]
exclude_dirs = ["scripts"]
skips = ["B101", "B601"]  # B101: assert_used, B601: shell_injection (may be false positives)
requirements-dev.txt (new file, 1 line)
@@ -0,0 +1 @@
-r requirements.txt
requirements-test.txt (new file, 9 lines)
@@ -0,0 +1,9 @@
pytest
pytest-asyncio
pytest-cov
pytest-httpx
python-jose
ruff
black
mypy
bandit
@@ -1,4 +1,5 @@
|
||||
fastapi
|
||||
pydantic
|
||||
uvicorn
|
||||
sqlalchemy
|
||||
psycopg2-binary
|
||||
@@ -7,7 +8,10 @@ httpx
|
||||
jinja2
|
||||
pandas
|
||||
numpy
|
||||
pytest
|
||||
pytest-cov
|
||||
pytest-httpx
|
||||
playwright
|
||||
passlib
|
||||
argon2-cffi
|
||||
python-jose
|
||||
python-multipart
|
||||
openpyxl
|
||||
prometheus-client
|
||||
plotly
|
||||
routes/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""API route registrations."""
routes/auth.py (new file, 484 lines)
@@ -0,0 +1,484 @@
from __future__ import annotations

from datetime import datetime, timedelta, timezone
from typing import Any, Iterable

from fastapi import APIRouter, Depends, HTTPException, Request, UploadFile, status
from fastapi.responses import HTMLResponse, RedirectResponse
from pydantic import ValidationError
from starlette.datastructures import FormData

from dependencies import (
    get_auth_session,
    get_jwt_settings,
    get_session_strategy,
    get_unit_of_work,
    require_current_user,
)
from models import Role, User
from schemas.auth import (
    LoginForm,
    PasswordResetForm,
    PasswordResetRequestForm,
    RegistrationForm,
)
from services.exceptions import EntityConflictError
from services.security import (
    JWTSettings,
    TokenDecodeError,
    TokenExpiredError,
    TokenTypeMismatchError,
    create_access_token,
    create_refresh_token,
    decode_access_token,
    hash_password,
    verify_password,
)
from services.session import (
    AuthSession,
    SessionStrategy,
    clear_session_cookies,
    set_session_cookies,
)
from services.repositories import RoleRepository, UserRepository
from services.unit_of_work import UnitOfWork
from routes.template_filters import create_templates

router = APIRouter(tags=["Authentication"])
templates = create_templates()

_PASSWORD_RESET_SCOPE = "password-reset"
_AUTH_SCOPE = "auth"


def _template(
    request: Request,
    template_name: str,
    context: dict[str, Any],
    *,
    status_code: int = status.HTTP_200_OK,
) -> HTMLResponse:
    return templates.TemplateResponse(
        request,
        template_name,
        context,
        status_code=status_code,
    )


def _validation_errors(exc: ValidationError) -> list[str]:
    return [error.get("msg", "Invalid input.") for error in exc.errors()]


def _scopes(include: Iterable[str]) -> list[str]:
    return list(include)


def _normalise_form_data(form_data: FormData) -> dict[str, str]:
    normalised: dict[str, str] = {}
    for key, value in form_data.multi_items():
        if isinstance(value, UploadFile):
            str_value = value.filename or ""
        else:
            str_value = str(value)
        normalised[key] = str_value
    return normalised


def _require_users_repo(uow: UnitOfWork) -> UserRepository:
    if not uow.users:
        raise RuntimeError("User repository is not initialised")
    return uow.users


def _require_roles_repo(uow: UnitOfWork) -> RoleRepository:
    if not uow.roles:
        raise RuntimeError("Role repository is not initialised")
    return uow.roles


@router.get("/login", response_class=HTMLResponse, include_in_schema=False, name="auth.login_form")
def login_form(request: Request) -> HTMLResponse:
    return _template(
        request,
        "login.html",
        {
            "form_action": request.url_for("auth.login_submit"),
            "errors": [],
            "username": "",
        },
    )


@router.post("/login", include_in_schema=False, name="auth.login_submit")
async def login_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
    session_strategy: SessionStrategy = Depends(get_session_strategy),
):
    form_data = _normalise_form_data(await request.form())
    try:
        form = LoginForm(**form_data)
    except ValidationError as exc:
        return _template(
            request,
            "login.html",
            {
                "form_action": request.url_for("auth.login_submit"),
                "errors": _validation_errors(exc),
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    identifier = form.username
    users_repo = _require_users_repo(uow)
    user = _lookup_user(users_repo, identifier)
    errors: list[str] = []

    if not user or not verify_password(form.password, user.password_hash):
        errors.append("Invalid username or password.")
    elif not user.is_active:
        errors.append("Account is inactive. Contact an administrator.")

    if errors:
        return _template(
            request,
            "login.html",
            {
                "form_action": request.url_for("auth.login_submit"),
                "errors": errors,
                "username": identifier,
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    assert user is not None  # mypy hint - guarded above
    user.last_login_at = datetime.now(timezone.utc)

    access_token = create_access_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_AUTH_SCOPE,)),
    )
    refresh_token = create_refresh_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_AUTH_SCOPE,)),
    )

    response = RedirectResponse(
        request.url_for("dashboard.home"),
        status_code=status.HTTP_303_SEE_OTHER,
    )
    set_session_cookies(
        response,
        access_token=access_token,
        refresh_token=refresh_token,
        strategy=session_strategy,
        jwt_settings=jwt_settings,
    )
    return response
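
On success the handler answers 303 and carries the tokens in session cookies rather than in the response body. A hedged test sketch with FastAPI's TestClient, assuming the application object lives in `main.app` and a seeded user exists (both assumptions):

    from fastapi.testclient import TestClient
    from main import app  # assumed application module

    client = TestClient(app)
    resp = client.post(
        "/login",
        data={"username": "demo", "password": "s3cret!"},  # illustrative credentials
        follow_redirects=False,
    )
    assert resp.status_code == 303  # redirect to the dashboard on success
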

@router.get("/logout", include_in_schema=False, name="auth.logout")
async def logout(
    request: Request,
    _: User = Depends(require_current_user),
    session: AuthSession = Depends(get_auth_session),
    session_strategy: SessionStrategy = Depends(get_session_strategy),
) -> RedirectResponse:
    session.mark_cleared()
    redirect_url = request.url_for(
        "auth.login_form").include_query_params(logout="1")
    response = RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )
    clear_session_cookies(response, session_strategy)
    return response


def _lookup_user(users_repo: UserRepository, identifier: str) -> User | None:
    if "@" in identifier:
        return users_repo.get_by_email(identifier.lower(), with_roles=True)
    return users_repo.get_by_username(identifier, with_roles=True)


@router.get("/register", response_class=HTMLResponse, include_in_schema=False, name="auth.register_form")
def register_form(request: Request) -> HTMLResponse:
    return _template(
        request,
        "register.html",
        {
            "form_action": request.url_for("auth.register_submit"),
            "errors": [],
            "form_data": None,
        },
    )


@router.post("/register", include_in_schema=False, name="auth.register_submit")
async def register_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
):
    form_data = _normalise_form_data(await request.form())
    try:
        form = RegistrationForm(**form_data)
    except ValidationError as exc:
        return _registration_error_response(request, _validation_errors(exc))

    errors: list[str] = []
    users_repo = _require_users_repo(uow)
    roles_repo = _require_roles_repo(uow)
    uow.ensure_default_roles()

    if users_repo.get_by_email(form.email):
        errors.append("Email is already registered.")
    if users_repo.get_by_username(form.username):
        errors.append("Username is already taken.")

    if errors:
        return _registration_error_response(request, errors, form)

    user = User(
        email=form.email,
        username=form.username,
        password_hash=hash_password(form.password),
        is_active=True,
        is_superuser=False,
    )

    try:
        created = users_repo.create(user)
    except EntityConflictError:
        return _registration_error_response(
            request,
            ["An account with this username or email already exists."],
            form,
        )

    viewer_role = _ensure_viewer_role(roles_repo)
    if viewer_role is not None:
        users_repo.assign_role(
            user_id=created.id,
            role_id=viewer_role.id,
            granted_by=created.id,
        )

    redirect_url = request.url_for(
        "auth.login_form").include_query_params(registered="1")
    return RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )


def _registration_error_response(
    request: Request,
    errors: list[str],
    form: RegistrationForm | None = None,
) -> HTMLResponse:
    context = {
        "form_action": request.url_for("auth.register_submit"),
        "errors": errors,
        "form_data": form.model_dump(exclude={"password", "confirm_password"}) if form else None,
    }
    return _template(
        request,
        "register.html",
        context,
        status_code=status.HTTP_400_BAD_REQUEST,
    )

def _ensure_viewer_role(roles_repo: RoleRepository) -> Role | None:
    # ensure_default_roles() has already seeded the defaults by the time this
    # runs, so a single lookup suffices; the original repeated the same
    # get_by_name("viewer") call on the fallback path.
    return roles_repo.get_by_name("viewer")

@router.get(
    "/forgot-password",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="auth.password_reset_request_form",
)
def password_reset_request_form(request: Request) -> HTMLResponse:
    return _template(
        request,
        "forgot_password.html",
        {
            "form_action": request.url_for("auth.password_reset_request_submit"),
            "errors": [],
            "message": None,
        },
    )


@router.post(
    "/forgot-password",
    include_in_schema=False,
    name="auth.password_reset_request_submit",
)
async def password_reset_request_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
):
    form_data = _normalise_form_data(await request.form())
    try:
        form = PasswordResetRequestForm(**form_data)
    except ValidationError as exc:
        return _template(
            request,
            "forgot_password.html",
            {
                "form_action": request.url_for("auth.password_reset_request_submit"),
                "errors": _validation_errors(exc),
                "message": None,
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    users_repo = _require_users_repo(uow)
    user = users_repo.get_by_email(form.email)
    if not user:
        return _template(
            request,
            "forgot_password.html",
            {
                "form_action": request.url_for("auth.password_reset_request_submit"),
                "errors": [],
                "message": "If an account exists, a reset link has been sent.",
            },
        )

    token = create_access_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_PASSWORD_RESET_SCOPE,)),
        expires_delta=timedelta(hours=1),
    )

    reset_url = request.url_for(
        "auth.password_reset_form").include_query_params(token=token)
    return RedirectResponse(reset_url, status_code=status.HTTP_303_SEE_OTHER)


@router.get(
    "/reset-password",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="auth.password_reset_form",
)
def password_reset_form(
    request: Request,
    token: str | None = None,
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
) -> HTMLResponse:
    errors: list[str] = []
    if not token:
        errors.append("Missing password reset token.")
    else:
        try:
            payload = decode_access_token(token, jwt_settings)
            if _PASSWORD_RESET_SCOPE not in payload.scopes:
                errors.append("Invalid token scope.")
        except TokenExpiredError:
            errors.append(
                "Token has expired. Please request a new password reset.")
        except (TokenDecodeError, TokenTypeMismatchError):
            errors.append("Invalid password reset token.")

    return _template(
        request,
        "reset_password.html",
        {
            "form_action": request.url_for("auth.password_reset_submit"),
            "token": token,
            "errors": errors,
        },
        status_code=status.HTTP_400_BAD_REQUEST if errors else status.HTTP_200_OK,
    )


@router.post(
    "/reset-password",
    include_in_schema=False,
    name="auth.password_reset_submit",
)
async def password_reset_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
):
    form_data = _normalise_form_data(await request.form())
    try:
        form = PasswordResetForm(**form_data)
    except ValidationError as exc:
        return _template(
            request,
            "reset_password.html",
            {
                "form_action": request.url_for("auth.password_reset_submit"),
                "token": form_data.get("token"),
                "errors": _validation_errors(exc),
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    try:
        payload = decode_access_token(form.token, jwt_settings)
    except TokenExpiredError:
        return _reset_error_response(
            request,
            form.token,
            "Token has expired. Please request a new password reset.",
        )
    except (TokenDecodeError, TokenTypeMismatchError):
        return _reset_error_response(
            request,
            form.token,
            "Invalid password reset token.",
        )

    if _PASSWORD_RESET_SCOPE not in payload.scopes:
        return _reset_error_response(
            request,
            form.token,
            "Invalid password reset token scope.",
        )

    users_repo = _require_users_repo(uow)
    user_id = int(payload.sub)
    user = users_repo.get(user_id)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")

    user.set_password(form.password)
    if not user.is_active:
        user.is_active = True

    redirect_url = request.url_for(
        "auth.login_form").include_query_params(reset="1")
    return RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )


def _reset_error_response(request: Request, token: str, message: str) -> HTMLResponse:
    return _template(
        request,
        "reset_password.html",
        {
            "form_action": request.url_for("auth.password_reset_submit"),
            "token": token,
            "errors": [message],
        },
        status_code=status.HTTP_400_BAD_REQUEST,
    )
routes/calculations.py (new file, 2119 lines)
File diff suppressed because it is too large
@@ -1,50 +0,0 @@
from typing import List, Optional

from fastapi import APIRouter, Depends, status
from pydantic import BaseModel, ConfigDict, PositiveFloat, field_validator
from sqlalchemy.orm import Session

from models.consumption import Consumption
from routes.dependencies import get_db


router = APIRouter(prefix="/api/consumption", tags=["Consumption"])


class ConsumptionBase(BaseModel):
    scenario_id: int
    amount: PositiveFloat
    description: Optional[str] = None
    unit_name: Optional[str] = None
    unit_symbol: Optional[str] = None

    @field_validator("unit_name", "unit_symbol")
    @classmethod
    def _normalize_text(cls, value: Optional[str]) -> Optional[str]:
        if value is None:
            return None
        stripped = value.strip()
        return stripped or None


class ConsumptionCreate(ConsumptionBase):
    pass


class ConsumptionRead(ConsumptionBase):
    id: int
    model_config = ConfigDict(from_attributes=True)


@router.post("/", response_model=ConsumptionRead, status_code=status.HTTP_201_CREATED)
def create_consumption(item: ConsumptionCreate, db: Session = Depends(get_db)):
    db_item = Consumption(**item.model_dump())
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item


@router.get("/", response_model=List[ConsumptionRead])
def list_consumption(db: Session = Depends(get_db)):
    return db.query(Consumption).all()
routes/costs.py (deleted file, 119 lines)
@@ -1,119 +0,0 @@
from typing import List, Optional

from fastapi import APIRouter, Depends
from pydantic import BaseModel, ConfigDict, field_validator
from sqlalchemy.orm import Session

from models.capex import Capex
from models.opex import Opex
from routes.dependencies import get_db

router = APIRouter(prefix="/api/costs", tags=["Costs"])
# Pydantic schemas for CAPEX and OPEX


class _CostBase(BaseModel):
    scenario_id: int
    amount: float
    description: Optional[str] = None
    currency_code: Optional[str] = "USD"
    currency_id: Optional[int] = None

    @field_validator("currency_code")
    @classmethod
    def _normalize_currency(cls, value: Optional[str]) -> str:
        code = (value or "USD").strip().upper()
        return code[:3] if len(code) > 3 else code


class CapexCreate(_CostBase):
    pass


class CapexRead(_CostBase):
    id: int
    # use from_attributes so Pydantic reads attributes off SQLAlchemy model
    model_config = ConfigDict(from_attributes=True)

    # optionally include nested currency info
    currency: Optional["CurrencyRead"] = None


class OpexCreate(_CostBase):
    pass


class OpexRead(_CostBase):
    id: int
    model_config = ConfigDict(from_attributes=True)
    currency: Optional["CurrencyRead"] = None


class CurrencyRead(BaseModel):
    id: int
    code: str
    name: Optional[str] = None
    symbol: Optional[str] = None
    is_active: Optional[bool] = True

    model_config = ConfigDict(from_attributes=True)


# forward refs
CapexRead.model_rebuild()
OpexRead.model_rebuild()


# Capex endpoints
@router.post("/capex", response_model=CapexRead)
def create_capex(item: CapexCreate, db: Session = Depends(get_db)):
    payload = item.model_dump()
    # Prefer explicit currency_id if supplied
    cid = payload.get("currency_id")
    if not cid:
        code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
        currency_cls = __import__(
            "models.currency", fromlist=["Currency"]).Currency
        currency = db.query(currency_cls).filter_by(code=code).one_or_none()
        if currency is None:
            currency = currency_cls(code=code, name=code, symbol=None)
            db.add(currency)
            db.flush()
        payload["currency_id"] = currency.id
    db_item = Capex(**payload)
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item


@router.get("/capex", response_model=List[CapexRead])
def list_capex(db: Session = Depends(get_db)):
    return db.query(Capex).all()


# Opex endpoints
@router.post("/opex", response_model=OpexRead)
def create_opex(item: OpexCreate, db: Session = Depends(get_db)):
    payload = item.model_dump()
    cid = payload.get("currency_id")
    if not cid:
        code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
        currency_cls = __import__(
            "models.currency", fromlist=["Currency"]).Currency
        currency = db.query(currency_cls).filter_by(code=code).one_or_none()
        if currency is None:
            currency = currency_cls(code=code, name=code, symbol=None)
            db.add(currency)
            db.flush()
        payload["currency_id"] = currency.id
    db_item = Opex(**payload)
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item


@router.get("/opex", response_model=List[OpexRead])
def list_opex(db: Session = Depends(get_db)):
    return db.query(Opex).all()
@@ -1,17 +0,0 @@
from typing import List, Dict, Any

from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from models.currency import Currency
from routes.dependencies import get_db

router = APIRouter(prefix="/api/currencies", tags=["Currencies"])


@router.get("/", response_model=List[Dict[str, Any]])
def list_currencies(db: Session = Depends(get_db)):
    results = []
    for c in db.query(Currency).filter_by(is_active=True).order_by(Currency.code).all():
        results.append({"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol})
    return results
routes/dashboard.py (new file, 130 lines)
@@ -0,0 +1,130 @@
from __future__ import annotations

from datetime import datetime

from fastapi import APIRouter, Depends, Request
from fastapi.responses import HTMLResponse, RedirectResponse
from routes.template_filters import create_templates

from dependencies import get_current_user, get_unit_of_work
from models import ScenarioStatus, User
from services.unit_of_work import UnitOfWork

router = APIRouter(tags=["Dashboard"])
templates = create_templates()


def _format_timestamp(moment: datetime | None) -> str | None:
    if moment is None:
        return None
    return moment.strftime("%Y-%m-%d")


def _format_timestamp_with_time(moment: datetime | None) -> str | None:
    if moment is None:
        return None
    return moment.strftime("%Y-%m-%d %H:%M")


def _load_metrics(uow: UnitOfWork) -> dict[str, object]:
    if not uow.projects or not uow.scenarios or not uow.financial_inputs:
        raise RuntimeError("UnitOfWork repositories not initialised")
    total_projects = uow.projects.count()
    active_scenarios = uow.scenarios.count_by_status(ScenarioStatus.ACTIVE)
    pending_simulations = uow.scenarios.count_by_status(ScenarioStatus.DRAFT)
    last_import_at = uow.financial_inputs.latest_created_at()
    return {
        "total_projects": total_projects,
        "active_scenarios": active_scenarios,
        "pending_simulations": pending_simulations,
        "last_import": _format_timestamp(last_import_at),
    }


def _load_recent_projects(uow: UnitOfWork) -> list:
    if not uow.projects:
        raise RuntimeError("Project repository not initialised")
    return list(uow.projects.recent(limit=5))


def _load_simulation_updates(uow: UnitOfWork) -> list[dict[str, object]]:
    updates: list[dict[str, object]] = []
    if not uow.scenarios:
        raise RuntimeError("Scenario repository not initialised")
    scenarios = uow.scenarios.recent(limit=5, with_project=True)
    for scenario in scenarios:
        project_name = scenario.project.name if scenario.project else f"Project #{scenario.project_id}"
        timestamp_label = _format_timestamp_with_time(scenario.updated_at)
        updates.append(
            {
                "title": f"{scenario.name} · {scenario.status.value.title()}",
                "description": f"Latest update recorded for {project_name}.",
                "timestamp": scenario.updated_at,
                "timestamp_label": timestamp_label,
            }
        )
    return updates


def _load_scenario_alerts(
    request: Request, uow: UnitOfWork
) -> list[dict[str, object]]:
    alerts: list[dict[str, object]] = []

    if not uow.scenarios:
        raise RuntimeError("Scenario repository not initialised")

    drafts = uow.scenarios.list_by_status(
        ScenarioStatus.DRAFT, limit=3, with_project=True
    )
    for scenario in drafts:
        project_name = scenario.project.name if scenario.project else f"Project #{scenario.project_id}"
        alerts.append(
            {
                "title": f"Draft scenario: {scenario.name}",
                "message": f"{project_name} has a scenario awaiting validation.",
                "link": request.url_for(
                    "projects.view_project", project_id=scenario.project_id
                ),
            }
        )

    if not alerts:
        archived = uow.scenarios.list_by_status(
            ScenarioStatus.ARCHIVED, limit=3, with_project=True
        )
        for scenario in archived:
            project_name = scenario.project.name if scenario.project else f"Project #{scenario.project_id}"
            alerts.append(
                {
                    "title": f"Archived scenario: {scenario.name}",
                    "message": f"Review archived scenario insights for {project_name}.",
                    "link": request.url_for(
                        "scenarios.view_scenario", scenario_id=scenario.id
                    ),
                }
            )

    return alerts


@router.get("/", include_in_schema=False, name="dashboard.home", response_model=None)
def dashboard_home(
    request: Request,
    user: User | None = Depends(get_current_user),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> HTMLResponse | RedirectResponse:
    if user is None:
        return RedirectResponse(request.url_for("auth.login_form"), status_code=303)

    context = {
        "metrics": _load_metrics(uow),
        "recent_projects": _load_recent_projects(uow),
        "simulation_updates": _load_simulation_updates(uow),
        "scenario_alerts": _load_scenario_alerts(request, uow),
        "export_modals": {
            "projects": request.url_for("exports.modal", dataset="projects"),
            "scenarios": request.url_for("exports.modal", dataset="scenarios"),
        },
    }
    return templates.TemplateResponse(request, "dashboard.html", context)
@@ -1,13 +0,0 @@
from collections.abc import Generator

from sqlalchemy.orm import Session

from config.database import SessionLocal


def get_db() -> Generator[Session, None, None]:
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@@ -1,36 +0,0 @@
from typing import Dict, List

from fastapi import APIRouter, Depends
from pydantic import BaseModel, ConfigDict
from sqlalchemy.orm import Session

from models.distribution import Distribution
from routes.dependencies import get_db

router = APIRouter(prefix="/api/distributions", tags=["Distributions"])


class DistributionCreate(BaseModel):
    name: str
    distribution_type: str
    parameters: Dict[str, float | int]


class DistributionRead(DistributionCreate):
    id: int
    model_config = ConfigDict(from_attributes=True)


@router.post("/", response_model=DistributionRead)
async def create_distribution(dist: DistributionCreate, db: Session = Depends(get_db)):
    db_dist = Distribution(**dist.model_dump())
    db.add(db_dist)
    db.commit()
    db.refresh(db_dist)
    return db_dist


@router.get("/", response_model=List[DistributionRead])
async def list_distributions(db: Session = Depends(get_db)):
    dists = db.query(Distribution).all()
    return dists
@@ -1,36 +0,0 @@
from typing import List, Optional

from fastapi import APIRouter, Depends
from pydantic import BaseModel, ConfigDict
from sqlalchemy.orm import Session

from models.equipment import Equipment
from routes.dependencies import get_db

router = APIRouter(prefix="/api/equipment", tags=["Equipment"])
# Pydantic schemas


class EquipmentCreate(BaseModel):
    scenario_id: int
    name: str
    description: Optional[str] = None


class EquipmentRead(EquipmentCreate):
    id: int
    model_config = ConfigDict(from_attributes=True)


@router.post("/", response_model=EquipmentRead)
async def create_equipment(item: EquipmentCreate, db: Session = Depends(get_db)):
    db_item = Equipment(**item.model_dump())
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item


@router.get("/", response_model=List[EquipmentRead])
async def list_equipment(db: Session = Depends(get_db)):
    return db.query(Equipment).all()
routes/exports.py (new file, 363 lines)
@@ -0,0 +1,363 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
|
||||
from fastapi.responses import HTMLResponse, StreamingResponse
|
||||
|
||||
from dependencies import get_unit_of_work, require_any_role
|
||||
from schemas.exports import (
|
||||
ExportFormat,
|
||||
ProjectExportRequest,
|
||||
ScenarioExportRequest,
|
||||
)
|
||||
from services.export_serializers import (
|
||||
export_projects_to_excel,
|
||||
export_scenarios_to_excel,
|
||||
stream_projects_to_csv,
|
||||
stream_scenarios_to_csv,
|
||||
)
|
||||
from services.unit_of_work import UnitOfWork
|
||||
from models.import_export_log import ImportExportLog
|
||||
from monitoring.metrics import observe_export
|
||||
from routes.template_filters import create_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/exports", tags=["exports"])
|
||||
templates = create_templates()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/modal/{dataset}",
|
||||
response_model=None,
|
||||
response_class=HTMLResponse,
|
||||
include_in_schema=False,
|
||||
name="exports.modal",
|
||||
)
|
||||
async def export_modal(
|
||||
dataset: str,
|
||||
request: Request,
|
||||
) -> HTMLResponse:
|
||||
dataset = dataset.lower()
|
||||
if dataset not in {"projects", "scenarios"}:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail="Unknown dataset")
|
||||
|
||||
submit_url = request.url_for(
|
||||
"export_projects" if dataset == "projects" else "export_scenarios"
|
||||
)
|
||||
return templates.TemplateResponse(
|
||||
request,
|
||||
"exports/modal.html",
|
||||
{
|
||||
"dataset": dataset,
|
||||
"submit_url": submit_url,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _timestamp_suffix() -> str:
|
||||
return datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")
|
||||
|
||||
|
||||
def _ensure_repository(repo, name: str):
|
||||
if repo is None:
|
||||
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"{name} repository unavailable")
|
||||
return repo
|
||||
|
||||
|
||||
def _record_export_audit(
|
||||
*,
|
||||
uow: UnitOfWork,
|
||||
dataset: str,
|
||||
status: str,
|
||||
export_format: ExportFormat,
|
||||
row_count: int,
|
||||
filename: str | None,
|
||||
) -> None:
|
||||
try:
|
||||
if uow.session is None:
|
||||
return
|
||||
log = ImportExportLog(
|
||||
action="export",
|
||||
dataset=dataset,
|
||||
status=status,
|
||||
filename=filename,
|
||||
row_count=row_count,
|
||||
detail=f"format={export_format.value}",
|
||||
)
|
||||
uow.session.add(log)
|
||||
uow.commit()
|
||||
except Exception:
|
||||
# best-effort auditing, do not break exports
|
||||
if uow.session is not None:
|
||||
uow.session.rollback()
|
||||
logger.exception(
|
||||
"export.audit.failed",
|
||||
extra={
|
||||
"event": "export.audit",
|
||||
"dataset": dataset,
|
||||
"status": status,
|
||||
"format": export_format.value,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.post(
    "/projects",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role("admin", "project_manager", "analyst"))],
)
async def export_projects(
    request: ProjectExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
    project_repo = _ensure_repository(getattr(uow, "projects", None), "Project")
    start = time.perf_counter()
    try:
        projects = project_repo.filtered_for_export(request.filters)
    except ValueError as exc:
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.warning(
            "export.validation_failed",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "validation_failed",
                "format": request.format.value,
                "error": str(exc),
            },
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "failure",
                "format": request.format.value,
            },
        )
        raise

    filename = f"projects-{_timestamp_suffix()}"

    if request.format == ExportFormat.CSV:
        stream = stream_projects_to_csv(projects)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = f"attachment; filename={filename}.csv"
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="success",
            export_format=request.format,
            row_count=len(projects),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "success",
                "format": request.format.value,
                "row_count": len(projects),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="projects",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response

    data = export_projects_to_excel(projects)
    _record_export_audit(
        uow=uow,
        dataset="projects",
        status="success",
        export_format=request.format,
        row_count=len(projects),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "projects",
            "status": "success",
            "format": request.format.value,
            "row_count": len(projects),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="projects",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )

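For the CSV branch, StreamingResponse consumes a lazy iterator, so the export never has to materialize the whole file in memory. The implementation of stream_projects_to_csv is not shown in this diff; a minimal sketch of the generator shape it would need (the column set here is an assumption) looks like this:

import csv
import io
from collections.abc import Iterable, Iterator


def stream_projects_to_csv(projects: Iterable) -> Iterator[str]:
    # Hypothetical sketch: yield CSV text chunks row by row so the response
    # streams instead of buffering the full export. Real columns live in the
    # export service module and may differ.
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(["id", "name", "status"])  # assumed column set
    yield buffer.getvalue()
    for project in projects:
        buffer.seek(0)
        buffer.truncate(0)
        writer.writerow([project.id, project.name, project.status])
        yield buffer.getvalue()
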
@router.post(
    "/scenarios",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role("admin", "project_manager", "analyst"))],
)
async def export_scenarios(
    request: ScenarioExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
    scenario_repo = _ensure_repository(getattr(uow, "scenarios", None), "Scenario")
    start = time.perf_counter()
    try:
        scenarios = scenario_repo.filtered_for_export(request.filters, include_project=True)
    except ValueError as exc:
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.warning(
            "export.validation_failed",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "validation_failed",
                "format": request.format.value,
                "error": str(exc),
            },
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "failure",
                "format": request.format.value,
            },
        )
        raise

    filename = f"scenarios-{_timestamp_suffix()}"

    if request.format == ExportFormat.CSV:
        stream = stream_scenarios_to_csv(scenarios)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = f"attachment; filename={filename}.csv"
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="success",
            export_format=request.format,
            row_count=len(scenarios),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "success",
                "format": request.format.value,
                "row_count": len(scenarios),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="scenarios",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response

    data = export_scenarios_to_excel(scenarios)
    _record_export_audit(
        uow=uow,
        dataset="scenarios",
        status="success",
        export_format=request.format,
        row_count=len(scenarios),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "scenarios",
            "status": "success",
            "format": request.format.value,
            "row_count": len(scenarios),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="scenarios",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )
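
export_projects and export_scenarios repeat an identical success tail (audit record, structured log entry, duration metric). A hypothetical consolidation, offered only as a refactor sketch assembled from the calls visible above, not as code in this diff:

def _finalize_export(
    *,
    uow: UnitOfWork,
    dataset: str,
    export_format: ExportFormat,
    row_count: int,
    filename: str,
    started: float,
) -> None:
    # Hypothetical helper: centralises the audit, log, and metric calls the
    # two endpoints currently duplicate for both CSV and Excel branches.
    _record_export_audit(
        uow=uow,
        dataset=dataset,
        status="success",
        export_format=export_format,
        row_count=row_count,
        filename=filename,
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": dataset,
            "status": "success",
            "format": export_format.value,
            "row_count": row_count,
            "filename": filename,
        },
    )
    observe_export(
        dataset=dataset,
        status="success",
        export_format=export_format.value,
        seconds=time.perf_counter() - started,
    )
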
170  routes/imports.py  Normal file
@@ -0,0 +1,170 @@
from __future__ import annotations

from io import BytesIO

from fastapi import APIRouter, Depends, File, HTTPException, Request, UploadFile, status
from fastapi.responses import HTMLResponse

from dependencies import (
    get_import_ingestion_service,
    require_roles,
    require_roles_html,
)
from models import User
from routes.template_filters import create_templates
from schemas.imports import (
    ImportCommitRequest,
    ProjectImportCommitResponse,
    ProjectImportPreviewResponse,
    ScenarioImportCommitResponse,
    ScenarioImportPreviewResponse,
)
from services.importers import ImportIngestionService, UnsupportedImportFormat

router = APIRouter(prefix="/imports", tags=["Imports"])
templates = create_templates()

MANAGE_ROLES = ("project_manager", "admin")


@router.get(
    "/ui",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="imports.ui",
)
def import_dashboard(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
) -> HTMLResponse:
    return templates.TemplateResponse(
        request,
        "imports/ui.html",
        {"title": "Imports"},
    )


async def _read_upload_file(upload: UploadFile) -> BytesIO:
    content = await upload.read()
    if not content:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Uploaded file is empty.",
        )
    return BytesIO(content)

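Buffering the upload into BytesIO gives downstream parsers a seekable stream, which Excel readers in particular require. How the ingestion service chooses a parser is not shown in this diff; a hedged sketch of filename-based dispatch might look like:

from pathlib import Path


def _looks_like_excel(filename: str) -> bool:
    # Hypothetical dispatch sketch; the real format handling lives in
    # services.importers and raises UnsupportedImportFormat for anything
    # it cannot parse.
    return Path(filename).suffix.lower() in {".xlsx", ".xls"}
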
@router.post(
    "/projects/preview",
    response_model=ProjectImportPreviewResponse,
    status_code=status.HTTP_200_OK,
)
async def preview_project_import(
    file: UploadFile = File(..., description="Project import file (CSV or Excel)"),
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(get_import_ingestion_service),
) -> ProjectImportPreviewResponse:
    if not file.filename:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Filename is required for import.",
        )

    stream = await _read_upload_file(file)

    try:
        preview = ingestion_service.preview_projects(stream, file.filename)
    except UnsupportedImportFormat as exc:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc),
        ) from exc

    return ProjectImportPreviewResponse.model_validate(preview)

@router.post(
    "/scenarios/preview",
    response_model=ScenarioImportPreviewResponse,
    status_code=status.HTTP_200_OK,
)
async def preview_scenario_import(
    file: UploadFile = File(..., description="Scenario import file (CSV or Excel)"),
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(get_import_ingestion_service),
) -> ScenarioImportPreviewResponse:
    if not file.filename:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Filename is required for import.",
        )

    stream = await _read_upload_file(file)

    try:
        preview = ingestion_service.preview_scenarios(stream, file.filename)
    except UnsupportedImportFormat as exc:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc),
        ) from exc

    return ScenarioImportPreviewResponse.model_validate(preview)

def _value_error_status(exc: ValueError) -> int:
    detail = str(exc)
    if detail.lower().startswith("unknown"):
        return status.HTTP_404_NOT_FOUND
    return status.HTTP_400_BAD_REQUEST

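The mapping is purely message-based: ValueError texts beginning with "unknown" (for example, an unrecognized commit token) become 404, everything else 400. An illustration with hypothetical error messages:

# Hypothetical messages; only the case-insensitive "unknown..." prefix matters.
assert _value_error_status(ValueError("Unknown import token")) == status.HTTP_404_NOT_FOUND
assert _value_error_status(ValueError("Row 7: missing project name")) == status.HTTP_400_BAD_REQUEST
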
@router.post(
    "/projects/commit",
    response_model=ProjectImportCommitResponse,
    status_code=status.HTTP_200_OK,
)
async def commit_project_import_endpoint(
    payload: ImportCommitRequest,
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(get_import_ingestion_service),
) -> ProjectImportCommitResponse:
    try:
        result = ingestion_service.commit_project_import(payload.token)
    except ValueError as exc:
        raise HTTPException(
            status_code=_value_error_status(exc),
            detail=str(exc),
        ) from exc

    return ProjectImportCommitResponse.model_validate(result)


@router.post(
    "/scenarios/commit",
    response_model=ScenarioImportCommitResponse,
    status_code=status.HTTP_200_OK,
)
async def commit_scenario_import_endpoint(
    payload: ImportCommitRequest,
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(get_import_ingestion_service),
) -> ScenarioImportCommitResponse:
    try:
        result = ingestion_service.commit_scenario_import(payload.token)
    except ValueError as exc:
        raise HTTPException(
            status_code=_value_error_status(exc),
            detail=str(exc),
        ) from exc

    return ScenarioImportCommitResponse.model_validate(result)
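
End to end, the import is a two-step flow: preview parses the upload and returns a token for server-side state, and commit replays that state via the token on ImportCommitRequest. A hypothetical client walk-through (the base URL, filename, and the token field on the preview payload are assumptions; authentication is omitted):

import httpx

# Hypothetical walk-through of the preview -> commit flow; assumes the API is
# served locally and that the preview response exposes the commit token.
with httpx.Client(base_url="http://localhost:8000") as client:
    with open("projects.xlsx", "rb") as fh:
        preview = client.post(
            "/imports/projects/preview",
            files={"file": ("projects.xlsx", fh)},
        )
    preview.raise_for_status()
    token = preview.json()["token"]  # assumed field name on the preview schema

    commit = client.post("/imports/projects/commit", json={"token": token})
    commit.raise_for_status()
    print(commit.json())
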
Some files were not shown because too many files have changed in this diff.