Compare commits
122 Commits
0fec805db1
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| cbaff5614a | |||
| f9feb51d33 | |||
| eb2687829f | |||
| ea101d1695 | |||
| 722f93b41c | |||
| e2e5e12f46 | |||
| 4e60168837 | |||
| dae3b59af9 | |||
| 839399363e | |||
| fa8a065138 | |||
| cd0c0ab416 | |||
| 854b1ac713 | |||
| 25fd13ce69 | |||
| 3746062819 | |||
| 958c165721 | |||
| 6e835c83eb | |||
| 75924fca84 | |||
| ac9ffddbde | |||
| 4e5a4c645d | |||
| e9678b6736 | |||
| e5e346b26a | |||
| b0e623d68e | |||
| 30dbc13fae | |||
| 31b9a1058a | |||
| bcd993d57c | |||
| 1262a4a63f | |||
| fb6816de00 | |||
| 4d0e1a9989 | |||
| ed8e05147c | |||
| 522b1e4105 | |||
| 4f00bf0d3c | |||
| 3551b0356d | |||
| 521a8abc2d | |||
| 1feae7ff85 | |||
| 1240b08740 | |||
| d9fd82b2e3 | |||
| 6c1570a254 | |||
| b1a6df9f90 | |||
| 6d496a599e | |||
| 1199813da0 | |||
| acf6f50bbd | |||
| ad306bd0aa | |||
| ed4187970c | |||
| 0fbe9f543e | |||
| 80825c2c5d | |||
| 44a3bfc1bf | |||
| 1f892ebdbb | |||
| bcdc9e861e | |||
| 23523f70f1 | |||
| 8ef6724960 | |||
| 6e466a3fd2 | |||
| 9d4c807475 | |||
| 9cd555e134 | |||
| e72e297c61 | |||
| 101d9309fd | |||
| 9556f9e1f1 | |||
| 4488cacdc9 | |||
| e06a6ae068 | |||
| 3bdae3c54c | |||
| d89b09fa80 | |||
| 2214bbe64f | |||
| 5d6592d657 | |||
| 3988171b46 | |||
| 1520724cab | |||
| 014d96c105 | |||
| 55fa1f56c1 | |||
| 53eacc352e | |||
| 2bfa498624 | |||
| 4cfc5d9ffa | |||
| ce7f4aa776 | |||
| e0497f58f0 | |||
| 60410fd71d | |||
| f55c77312d | |||
| 63ec4a6953 | |||
| b0ff79ae9c | |||
| 0670d05722 | |||
| 0694d4ec4b | |||
| ce9c174b53 | |||
| f68321cd04 | |||
| 44ff4d0e62 | |||
| 4364927965 | |||
| 795a9f99f4 | |||
| 032e6d2681 | |||
| 51c0fcec95 | |||
| 3051f91ab0 | |||
| e2465188c2 | |||
| 43b1e53837 | |||
| 4b33a5dba3 | |||
| 5f183faa63 | |||
| 1a7581cda0 | |||
| b1a0153a8d | |||
| 609b0d779f | |||
| eaef99f0ac | |||
| 3bc124c11f | |||
| 7058eb4172 | |||
| e0fa3861a6 | |||
| ab328b1a0b | |||
| 24cb3c2f57 | |||
| 118657491c | |||
| 0f79864188 | |||
| 27262bdfa3 | |||
| 3601c2e422 | |||
| 53879a411f | |||
| 2d848c2e09 | |||
| dad862e48e | |||
| 400f85c907 | |||
| 7f5ed6a42d | |||
| 053da332ac | |||
| 02da881d3e | |||
| c39dde3198 | |||
| faea6777a0 | |||
| d36611606d | |||
| 191500aeb7 | |||
| 61b42b3041 | |||
| 8bf46b80c8 | |||
| c69f933684 | |||
| c6fdc2d923 | |||
| dc3ebfbba5 | |||
| 32a96a27c5 | |||
| 203a5d08f2 | |||
| c6a0eb2588 | |||
| d807a50f77 |
25
.env.development
Normal file
25
.env.development
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Development Environment Configuration
|
||||||
|
ENVIRONMENT=development
|
||||||
|
DEBUG=true
|
||||||
|
LOG_LEVEL=DEBUG
|
||||||
|
|
||||||
|
# Database Configuration
|
||||||
|
DATABASE_HOST=postgres
|
||||||
|
DATABASE_PORT=5432
|
||||||
|
DATABASE_USER=calminer
|
||||||
|
DATABASE_PASSWORD=calminer_password
|
||||||
|
DATABASE_NAME=calminer_db
|
||||||
|
DATABASE_DRIVER=postgresql
|
||||||
|
|
||||||
|
# Application Settings
|
||||||
|
CALMINER_EXPORT_MAX_ROWS=1000
|
||||||
|
CALMINER_IMPORT_MAX_ROWS=10000
|
||||||
|
CALMINER_EXPORT_METADATA=true
|
||||||
|
CALMINER_IMPORT_STAGING_TTL=300
|
||||||
|
|
||||||
|
# Admin Seeding (for development)
|
||||||
|
CALMINER_SEED_ADMIN_EMAIL=admin@calminer.local
|
||||||
|
CALMINER_SEED_ADMIN_USERNAME=admin
|
||||||
|
CALMINER_SEED_ADMIN_PASSWORD=ChangeMe123!
|
||||||
|
CALMINER_SEED_ADMIN_ROLES=admin
|
||||||
|
CALMINER_SEED_FORCE=false
|
||||||
12
.env.example
12
.env.example
@@ -10,5 +10,13 @@ DATABASE_NAME=calminer
|
|||||||
# Optional: set a schema (comma-separated for multiple entries)
|
# Optional: set a schema (comma-separated for multiple entries)
|
||||||
# DATABASE_SCHEMA=public
|
# DATABASE_SCHEMA=public
|
||||||
|
|
||||||
# Legacy fallback (still supported, but granular settings are preferred)
|
# Default administrative credentials are provided at deployment time through environment variables
|
||||||
# DATABASE_URL=postgresql://<user>:<password>@localhost:5432/calminer
|
# (`CALMINER_SEED_ADMIN_EMAIL`, `CALMINER_SEED_ADMIN_USERNAME`, `CALMINER_SEED_ADMIN_PASSWORD`, `CALMINER_SEED_ADMIN_ROLES`).
|
||||||
|
# These values are consumed by a shared bootstrap helper on application startup, ensuring mandatory roles and the administrator account exist before any user interaction.
|
||||||
|
CALMINER_SEED_ADMIN_EMAIL=<email>
|
||||||
|
CALMINER_SEED_ADMIN_USERNAME=<username>
|
||||||
|
CALMINER_SEED_ADMIN_PASSWORD=<password>
|
||||||
|
CALMINER_SEED_ADMIN_ROLES=<roles>
|
||||||
|
# Operators can request a managed credential reset by setting `CALMINER_SEED_FORCE=true`.
|
||||||
|
# On the next startup the helper rotates the admin password and reapplies role assignments, so downstream environments must update stored secrets immediately after the reset.
|
||||||
|
# CALMINER_SEED_FORCE=false
|
||||||
25
.env.production
Normal file
25
.env.production
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Production Environment Configuration
|
||||||
|
ENVIRONMENT=production
|
||||||
|
DEBUG=false
|
||||||
|
LOG_LEVEL=WARNING
|
||||||
|
|
||||||
|
# Database Configuration (MUST be set externally - no defaults)
|
||||||
|
DATABASE_HOST=
|
||||||
|
DATABASE_PORT=5432
|
||||||
|
DATABASE_USER=
|
||||||
|
DATABASE_PASSWORD=
|
||||||
|
DATABASE_NAME=
|
||||||
|
DATABASE_DRIVER=postgresql
|
||||||
|
|
||||||
|
# Application Settings
|
||||||
|
CALMINER_EXPORT_MAX_ROWS=100000
|
||||||
|
CALMINER_IMPORT_MAX_ROWS=100000
|
||||||
|
CALMINER_EXPORT_METADATA=true
|
||||||
|
CALMINER_IMPORT_STAGING_TTL=3600
|
||||||
|
|
||||||
|
# Admin Seeding (for production - set strong password)
|
||||||
|
CALMINER_SEED_ADMIN_EMAIL=admin@calminer.com
|
||||||
|
CALMINER_SEED_ADMIN_USERNAME=admin
|
||||||
|
CALMINER_SEED_ADMIN_PASSWORD=CHANGE_THIS_VERY_STRONG_PASSWORD
|
||||||
|
CALMINER_SEED_ADMIN_ROLES=admin
|
||||||
|
CALMINER_SEED_FORCE=false
|
||||||
25
.env.staging
Normal file
25
.env.staging
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Staging Environment Configuration
|
||||||
|
ENVIRONMENT=staging
|
||||||
|
DEBUG=false
|
||||||
|
LOG_LEVEL=INFO
|
||||||
|
|
||||||
|
# Database Configuration (override with actual staging values)
|
||||||
|
DATABASE_HOST=postgres
|
||||||
|
DATABASE_PORT=5432
|
||||||
|
DATABASE_USER=calminer_staging
|
||||||
|
DATABASE_PASSWORD=CHANGE_THIS_STRONG_PASSWORD
|
||||||
|
DATABASE_NAME=calminer_staging_db
|
||||||
|
DATABASE_DRIVER=postgresql
|
||||||
|
|
||||||
|
# Application Settings
|
||||||
|
CALMINER_EXPORT_MAX_ROWS=50000
|
||||||
|
CALMINER_IMPORT_MAX_ROWS=50000
|
||||||
|
CALMINER_EXPORT_METADATA=true
|
||||||
|
CALMINER_IMPORT_STAGING_TTL=600
|
||||||
|
|
||||||
|
# Admin Seeding (for staging)
|
||||||
|
CALMINER_SEED_ADMIN_EMAIL=admin@staging.calminer.com
|
||||||
|
CALMINER_SEED_ADMIN_USERNAME=admin
|
||||||
|
CALMINER_SEED_ADMIN_PASSWORD=CHANGE_THIS_STRONG_PASSWORD
|
||||||
|
CALMINER_SEED_ADMIN_ROLES=admin
|
||||||
|
CALMINER_SEED_FORCE=false
|
||||||
3
.gitattributes
vendored
Normal file
3
.gitattributes
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
* text=auto
|
||||||
|
|
||||||
|
Dockerfile text eol=lf
|
||||||
232
.gitea/workflows/ci-build.yml
Normal file
232
.gitea/workflows/ci-build.yml
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
name: CI - Build
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
outputs:
|
||||||
|
allow_push: ${{ steps.meta.outputs.allow_push }}
|
||||||
|
ref_name: ${{ steps.meta.outputs.ref_name }}
|
||||||
|
event_name: ${{ steps.meta.outputs.event_name }}
|
||||||
|
sha: ${{ steps.meta.outputs.sha }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
DEFAULT_BRANCH: main
|
||||||
|
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
||||||
|
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
||||||
|
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
||||||
|
REGISTRY_CONTAINER_NAME: calminer
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Collect workflow metadata
|
||||||
|
id: meta
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
DEFAULT_BRANCH: ${{ env.DEFAULT_BRANCH }}
|
||||||
|
run: |
|
||||||
|
git_ref="${GITEA_REF:-${GITHUB_REF:-}}"
|
||||||
|
ref_name="${GITEA_REF_NAME:-${GITHUB_REF_NAME:-}}"
|
||||||
|
if [ -z "$ref_name" ] && [ -n "$git_ref" ]; then
|
||||||
|
ref_name="${git_ref##*/}"
|
||||||
|
fi
|
||||||
|
event_name="${GITEA_EVENT_NAME:-${GITHUB_EVENT_NAME:-}}"
|
||||||
|
sha="${GITEA_SHA:-${GITHUB_SHA:-}}"
|
||||||
|
if [ -z "$sha" ]; then
|
||||||
|
sha="$(git rev-parse HEAD)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ] && [ "$event_name" != "pull_request" ]; then
|
||||||
|
echo "allow_push=true" >> "$GITHUB_OUTPUT"
|
||||||
|
else
|
||||||
|
echo "allow_push=false" >> "$GITHUB_OUTPUT"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "ref_name=$ref_name" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "event_name=$event_name" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "sha=$sha" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Validate registry configuration
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
if [ -z "${REGISTRY_URL}" ]; then
|
||||||
|
echo "::error::REGISTRY_URL secret not configured. Configure it with your Gitea container registry host." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
server_url="${GITEA_SERVER_URL:-${GITHUB_SERVER_URL:-}}"
|
||||||
|
server_host="${server_url#http://}"
|
||||||
|
server_host="${server_host#https://}"
|
||||||
|
server_host="${server_host%%/*}"
|
||||||
|
server_host="${server_host%%:*}"
|
||||||
|
registry_host="${REGISTRY_URL#http://}"
|
||||||
|
registry_host="${registry_host#https://}"
|
||||||
|
registry_host="${registry_host%%/*}"
|
||||||
|
registry_host="${registry_host%%:*}"
|
||||||
|
if [ -n "${server_host}" ] && ! printf '%s' "${registry_host}" | grep -qi "${server_host}"; then
|
||||||
|
echo "::warning::REGISTRY_URL (${REGISTRY_URL}) does not match current Gitea host (${server_host}). Ensure this registry endpoint is managed by Gitea." >&2
|
||||||
|
fi
|
||||||
|
registry_repository="${registry_host}/allucanget/${REGISTRY_CONTAINER_NAME}"
|
||||||
|
echo "REGISTRY_HOST=${registry_host}" >> "$GITHUB_ENV"
|
||||||
|
echo "REGISTRY_REPOSITORY=${registry_repository}" >> "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- name: Set up QEMU and Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to gitea registry
|
||||||
|
if: ${{ steps.meta.outputs.allow_push == 'true' }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY_HOST }}
|
||||||
|
username: ${{ env.REGISTRY_USERNAME }}
|
||||||
|
password: ${{ env.REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Build image
|
||||||
|
id: build-image
|
||||||
|
env:
|
||||||
|
REGISTRY_REPOSITORY: ${{ env.REGISTRY_REPOSITORY }}
|
||||||
|
REGISTRY_CONTAINER_NAME: ${{ env.REGISTRY_CONTAINER_NAME }}
|
||||||
|
SHA_TAG: ${{ steps.meta.outputs.sha }}
|
||||||
|
PUSH_IMAGE: ${{ steps.meta.outputs.allow_push == 'true' && env.REGISTRY_HOST != '' && env.REGISTRY_USERNAME != '' && env.REGISTRY_PASSWORD != '' }}
|
||||||
|
run: |
|
||||||
|
set -eo pipefail
|
||||||
|
LOG_FILE=build.log
|
||||||
|
if [ "${PUSH_IMAGE}" = "true" ]; then
|
||||||
|
docker buildx build \
|
||||||
|
--load \
|
||||||
|
--tag "${REGISTRY_REPOSITORY}:latest" \
|
||||||
|
--tag "${REGISTRY_REPOSITORY}:${SHA_TAG}" \
|
||||||
|
--file Dockerfile \
|
||||||
|
. 2>&1 | tee "${LOG_FILE}"
|
||||||
|
else
|
||||||
|
docker buildx build \
|
||||||
|
--load \
|
||||||
|
--tag "${REGISTRY_CONTAINER_NAME}:ci" \
|
||||||
|
--file Dockerfile \
|
||||||
|
. 2>&1 | tee "${LOG_FILE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Push image
|
||||||
|
if: ${{ steps.meta.outputs.allow_push == 'true' }}
|
||||||
|
env:
|
||||||
|
REGISTRY_REPOSITORY: ${{ env.REGISTRY_REPOSITORY }}
|
||||||
|
SHA_TAG: ${{ steps.meta.outputs.sha }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
if [ -z "${REGISTRY_REPOSITORY}" ]; then
|
||||||
|
echo "::error::REGISTRY_REPOSITORY not defined; cannot push image" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
docker push "${REGISTRY_REPOSITORY}:${SHA_TAG}"
|
||||||
|
docker push "${REGISTRY_REPOSITORY}:latest"
|
||||||
|
|
||||||
|
- name: Upload docker build logs
|
||||||
|
if: failure()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: docker-build-logs
|
||||||
|
path: build.log
|
||||||
|
|
||||||
|
deploy:
|
||||||
|
needs: build
|
||||||
|
if: needs.build.outputs.allow_push == 'true'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
||||||
|
REGISTRY_CONTAINER_NAME: calminer
|
||||||
|
KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
|
||||||
|
STAGING_KUBE_CONFIG: ${{ secrets.STAGING_KUBE_CONFIG }}
|
||||||
|
PROD_KUBE_CONFIG: ${{ secrets.PROD_KUBE_CONFIG }}
|
||||||
|
K8S_DEPLOY_ENABLED: ${{ secrets.K8S_DEPLOY_ENABLED }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Resolve registry repository
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
if [ -z "${REGISTRY_URL}" ]; then
|
||||||
|
echo "::error::REGISTRY_URL secret not configured. Configure it with your Gitea container registry host." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
registry_host="${REGISTRY_URL#http://}"
|
||||||
|
registry_host="${registry_host#https://}"
|
||||||
|
registry_host="${registry_host%%/*}"
|
||||||
|
registry_host="${registry_host%%:*}"
|
||||||
|
registry_repository="${registry_host}/allucanget/${REGISTRY_CONTAINER_NAME}"
|
||||||
|
echo "REGISTRY_HOST=${registry_host}" >> "$GITHUB_ENV"
|
||||||
|
echo "REGISTRY_REPOSITORY=${registry_repository}" >> "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- name: Report Kubernetes deployment toggle
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
enabled="${K8S_DEPLOY_ENABLED:-}"
|
||||||
|
if [ "${enabled}" = "true" ]; then
|
||||||
|
echo "Kubernetes deployment is enabled for this run."
|
||||||
|
else
|
||||||
|
echo "::notice::Kubernetes deployment steps are disabled (set secrets.K8S_DEPLOY_ENABLED to 'true' to enable)."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Capture commit metadata
|
||||||
|
id: commit_meta
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
message="$(git log -1 --pretty=%B | tr '\n' ' ')"
|
||||||
|
echo "message=$message" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Set up kubectl for staging
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
|
||||||
|
uses: azure/k8s-set-context@v3
|
||||||
|
with:
|
||||||
|
method: kubeconfig
|
||||||
|
kubeconfig: ${{ env.STAGING_KUBE_CONFIG }}
|
||||||
|
|
||||||
|
- name: Set up kubectl for production
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
|
||||||
|
uses: azure/k8s-set-context@v3
|
||||||
|
with:
|
||||||
|
method: kubeconfig
|
||||||
|
kubeconfig: ${{ env.PROD_KUBE_CONFIG }}
|
||||||
|
|
||||||
|
- name: Deploy to staging
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
|
||||||
|
run: |
|
||||||
|
kubectl set image deployment/calminer-app calminer=${REGISTRY_REPOSITORY}:latest
|
||||||
|
kubectl apply -f k8s/configmap.yaml
|
||||||
|
kubectl apply -f k8s/secret.yaml
|
||||||
|
kubectl rollout status deployment/calminer-app
|
||||||
|
|
||||||
|
- name: Collect staging deployment logs
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy staging]')
|
||||||
|
run: |
|
||||||
|
mkdir -p logs/deployment/staging
|
||||||
|
kubectl get pods -o wide > logs/deployment/staging/pods.txt
|
||||||
|
kubectl get deployment calminer-app -o yaml > logs/deployment/staging/deployment.yaml
|
||||||
|
kubectl logs deployment/calminer-app --all-containers=true --tail=500 > logs/deployment/staging/calminer-app.log
|
||||||
|
|
||||||
|
- name: Deploy to production
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
|
||||||
|
run: |
|
||||||
|
kubectl set image deployment/calminer-app calminer=${REGISTRY_REPOSITORY}:latest
|
||||||
|
kubectl apply -f k8s/configmap.yaml
|
||||||
|
kubectl apply -f k8s/secret.yaml
|
||||||
|
kubectl rollout status deployment/calminer-app
|
||||||
|
|
||||||
|
- name: Collect production deployment logs
|
||||||
|
if: env.K8S_DEPLOY_ENABLED == 'true' && contains(steps.commit_meta.outputs.message, '[deploy production]')
|
||||||
|
run: |
|
||||||
|
mkdir -p logs/deployment/production
|
||||||
|
kubectl get pods -o wide > logs/deployment/production/pods.txt
|
||||||
|
kubectl get deployment calminer-app -o yaml > logs/deployment/production/deployment.yaml
|
||||||
|
kubectl logs deployment/calminer-app --all-containers=true --tail=500 > logs/deployment/production/calminer-app.log
|
||||||
|
|
||||||
|
- name: Upload deployment logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: deployment-logs
|
||||||
|
path: logs/deployment
|
||||||
|
if-no-files-found: ignore
|
||||||
44
.gitea/workflows/ci-lint.yml
Normal file
44
.gitea/workflows/ci-lint.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
name: CI - Lint
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
APT_CACHER_NG: http://192.168.88.14:3142
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
|
||||||
|
- name: Configure apt proxy
|
||||||
|
run: |
|
||||||
|
if [ -n "${APT_CACHER_NG}" ]; then
|
||||||
|
echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Install system packages
|
||||||
|
run: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y build-essential libpq-dev
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
pip install -r requirements-test.txt
|
||||||
|
|
||||||
|
- name: Run Ruff
|
||||||
|
run: ruff check .
|
||||||
|
|
||||||
|
- name: Run Black
|
||||||
|
run: black --check .
|
||||||
|
|
||||||
|
- name: Run Bandit
|
||||||
|
run: bandit -c pyproject.toml -r tests
|
||||||
73
.gitea/workflows/ci-test.yml
Normal file
73
.gitea/workflows/ci-test.yml
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
name: CI - Test
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
APT_CACHER_NG: http://192.168.88.14:3142
|
||||||
|
DB_DRIVER: postgresql+psycopg2
|
||||||
|
DB_HOST: 192.168.88.35
|
||||||
|
DB_NAME: calminer_test
|
||||||
|
DB_USER: calminer
|
||||||
|
DB_PASSWORD: calminer_password
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:17
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: ${{ env.DB_USER }}
|
||||||
|
POSTGRES_PASSWORD: ${{ env.DB_PASSWORD }}
|
||||||
|
POSTGRES_DB: ${{ env.DB_NAME }}
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
|
||||||
|
- name: Configure apt proxy
|
||||||
|
run: |
|
||||||
|
if [ -n "${APT_CACHER_NG}" ]; then
|
||||||
|
echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Install system packages
|
||||||
|
run: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y build-essential libpq-dev
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
pip install -r requirements-test.txt
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
env:
|
||||||
|
DATABASE_DRIVER: ${{ env.DB_DRIVER }}
|
||||||
|
DATABASE_HOST: postgres
|
||||||
|
DATABASE_PORT: 5432
|
||||||
|
DATABASE_USER: ${{ env.DB_USER }}
|
||||||
|
DATABASE_PASSWORD: ${{ env.DB_PASSWORD }}
|
||||||
|
DATABASE_NAME: ${{ env.DB_NAME }}
|
||||||
|
run: |
|
||||||
|
pytest --cov=. --cov-report=term-missing --cov-report=xml --cov-fail-under=80 --junitxml=pytest-report.xml
|
||||||
|
|
||||||
|
- name: Upload test artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: test-artifacts
|
||||||
|
path: |
|
||||||
|
coverage.xml
|
||||||
|
pytest-report.xml
|
||||||
30
.gitea/workflows/ci.yml
Normal file
30
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- develop
|
||||||
|
- v2
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- develop
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
uses: ./.gitea/workflows/ci-lint.yml
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
test:
|
||||||
|
needs: lint
|
||||||
|
uses: ./.gitea/workflows/ci-test.yml
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
build:
|
||||||
|
needs:
|
||||||
|
- lint
|
||||||
|
- test
|
||||||
|
uses: ./.gitea/workflows/ci-build.yml
|
||||||
|
secrets: inherit
|
||||||
@@ -1,141 +0,0 @@
|
|||||||
name: CI
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, develop]
|
|
||||||
pull_request:
|
|
||||||
branches: [main, develop]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
env:
|
|
||||||
APT_CACHER_NG: http://192.168.88.14:3142
|
|
||||||
DB_DRIVER: postgresql+psycopg2
|
|
||||||
DB_HOST: 192.168.88.35
|
|
||||||
DB_NAME: calminer_test
|
|
||||||
DB_USER: calminer
|
|
||||||
DB_PASSWORD: calminer_password
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:17
|
|
||||||
env:
|
|
||||||
POSTGRES_USER: ${{ env.DB_USER }}
|
|
||||||
POSTGRES_PASSWORD: ${{ env.DB_PASSWORD }}
|
|
||||||
POSTGRES_DB: ${{ env.DB_NAME }}
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.11'
|
|
||||||
|
|
||||||
- name: Get pip cache dir
|
|
||||||
id: pip-cache
|
|
||||||
run: |
|
|
||||||
echo "path=$(pip cache dir)" >> $GITEA_OUTPUT
|
|
||||||
echo "Pip cache dir: $(pip cache dir)"
|
|
||||||
|
|
||||||
- name: Cache pip dependencies
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pip-cache.outputs.path }}
|
|
||||||
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'requirements-test.txt') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-pip-
|
|
||||||
|
|
||||||
- name: Update apt-cacher-ng config
|
|
||||||
run: |-
|
|
||||||
echo 'Acquire::http::Proxy "{{ env.APT_CACHER_NG }}";' | tee /etc/apt/apt.conf.d/01apt-cacher-ng
|
|
||||||
apt-get update
|
|
||||||
|
|
||||||
- name: Update system packages
|
|
||||||
run: apt-get upgrade -y
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install -r requirements.txt
|
|
||||||
pip install -r requirements-test.txt
|
|
||||||
|
|
||||||
- name: Install Playwright system dependencies
|
|
||||||
run: playwright install-deps
|
|
||||||
|
|
||||||
- name: Install Playwright browsers
|
|
||||||
run: playwright install
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
env:
|
|
||||||
DATABASE_DRIVER: ${{ env.DB_DRIVER }}
|
|
||||||
DATABASE_HOST: postgres
|
|
||||||
DATABASE_PORT: 5432
|
|
||||||
DATABASE_USER: ${{ env.DB_USER }}
|
|
||||||
DATABASE_PASSWORD: ${{ env.DB_PASSWORD }}
|
|
||||||
DATABASE_NAME: ${{ env.DB_NAME }}
|
|
||||||
run: |
|
|
||||||
pytest tests/ --cov=.
|
|
||||||
|
|
||||||
- name: Build Docker image
|
|
||||||
run: |
|
|
||||||
docker build -t calminer .
|
|
||||||
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: test
|
|
||||||
env:
|
|
||||||
DEFAULT_BRANCH: main
|
|
||||||
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
|
||||||
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
|
||||||
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
|
||||||
REGISTRY_CONTAINER_NAME: calminer
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Collect workflow metadata
|
|
||||||
id: meta
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
ref_name="${GITHUB_REF_NAME:-${GITHUB_REF##*/}}"
|
|
||||||
event_name="${GITHUB_EVENT_NAME:-}"
|
|
||||||
sha="${GITHUB_SHA:-}"
|
|
||||||
|
|
||||||
if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ]; then
|
|
||||||
echo "on_default=true" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "on_default=false" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "ref_name=$ref_name" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "event_name=$event_name" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "sha=$sha" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Set up QEMU and Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: Log in to gitea registry
|
|
||||||
if: ${{ steps.meta.outputs.on_default == 'true' }}
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
registry: ${{ env.REGISTRY_URL }}
|
|
||||||
username: ${{ env.REGISTRY_USERNAME }}
|
|
||||||
password: ${{ env.REGISTRY_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Build and push image
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: Dockerfile
|
|
||||||
push: ${{ steps.meta.outputs.on_default == 'true' && steps.meta.outputs.event_name != 'pull_request' && (env.REGISTRY_URL != '' && env.REGISTRY_USERNAME != '' && env.REGISTRY_PASSWORD != '') }}
|
|
||||||
tags: |
|
|
||||||
${{ env.REGISTRY_URL }}/allucanget/${{ env.REGISTRY_CONTAINER_NAME }}:latest
|
|
||||||
${{ env.REGISTRY_URL }}/allucanget/${{ env.REGISTRY_CONTAINER_NAME }}:${{ steps.meta.outputs.sha }}
|
|
||||||
78
.gitea/workflows/deploy-coolify.yml
Normal file
78
.gitea/workflows/deploy-coolify.yml
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
name: Deploy - Coolify
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
COOLIFY_BASE_URL: ${{ secrets.COOLIFY_BASE_URL }}
|
||||||
|
COOLIFY_API_TOKEN: ${{ secrets.COOLIFY_API_TOKEN }}
|
||||||
|
COOLIFY_APPLICATION_ID: ${{ secrets.COOLIFY_APPLICATION_ID }}
|
||||||
|
COOLIFY_DEPLOY_ENV: ${{ secrets.COOLIFY_DEPLOY_ENV }}
|
||||||
|
DOCKER_COMPOSE_PATH: docker-compose.prod.yml
|
||||||
|
ENV_FILE_PATH: deploy/.env
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Prepare compose bundle
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
mkdir -p deploy
|
||||||
|
cp "$DOCKER_COMPOSE_PATH" deploy/docker-compose.yml
|
||||||
|
if [ -n "$COOLIFY_DEPLOY_ENV" ]; then
|
||||||
|
printf '%s\n' "$COOLIFY_DEPLOY_ENV" > "$ENV_FILE_PATH"
|
||||||
|
elif [ ! -f "$ENV_FILE_PATH" ]; then
|
||||||
|
echo "::error::COOLIFY_DEPLOY_ENV secret not configured and deploy/.env missing" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Validate Coolify secrets
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
missing=0
|
||||||
|
for var in COOLIFY_BASE_URL COOLIFY_API_TOKEN COOLIFY_APPLICATION_ID; do
|
||||||
|
if [ -z "${!var}" ]; then
|
||||||
|
echo "::error::Missing required secret: $var"
|
||||||
|
missing=1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
if [ "$missing" -eq 1 ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Trigger deployment via Coolify API
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
api_url="$COOLIFY_BASE_URL/api/v1/deploy"
|
||||||
|
payload=$(jq -n --arg uuid "$COOLIFY_APPLICATION_ID" '{ uuid: $uuid }')
|
||||||
|
response=$(curl -sS -w '\n%{http_code}' \
|
||||||
|
-X POST "$api_url" \
|
||||||
|
-H "Authorization: Bearer $COOLIFY_API_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$payload")
|
||||||
|
body=$(echo "$response" | head -n -1)
|
||||||
|
status=$(echo "$response" | tail -n1)
|
||||||
|
echo "Deploy response status: $status"
|
||||||
|
echo "$body"
|
||||||
|
printf '%s' "$body" > deploy/coolify-response.json
|
||||||
|
if [ "$status" -ge 400 ]; then
|
||||||
|
echo "::error::Deployment request failed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload deployment bundle
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: coolify-deploy-bundle
|
||||||
|
path: |
|
||||||
|
deploy/docker-compose.yml
|
||||||
|
deploy/.env
|
||||||
|
deploy/coolify-response.json
|
||||||
|
if-no-files-found: warn
|
||||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -17,6 +17,7 @@ env/
|
|||||||
# environment variables
|
# environment variables
|
||||||
.env
|
.env
|
||||||
*.env
|
*.env
|
||||||
|
.env.*
|
||||||
# except example files
|
# except example files
|
||||||
!config/*.env.example
|
!config/*.env.example
|
||||||
|
|
||||||
@@ -46,8 +47,14 @@ htmlcov/
|
|||||||
logs/
|
logs/
|
||||||
|
|
||||||
# SQLite database
|
# SQLite database
|
||||||
|
data/
|
||||||
*.sqlite3
|
*.sqlite3
|
||||||
test*.db
|
test*.db
|
||||||
|
local*.db
|
||||||
|
|
||||||
# Act runner files
|
# Act runner files
|
||||||
.runner
|
.runner
|
||||||
|
|
||||||
|
# Devcontainer files
|
||||||
|
.devcontainer/devcontainer.json
|
||||||
|
.devcontainer/docker-compose.yml
|
||||||
|
|||||||
13
.pre-commit-config.yaml
Normal file
13
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
repos:
|
||||||
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
|
rev: v0.6.1
|
||||||
|
hooks:
|
||||||
|
- id: ruff
|
||||||
|
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||||
|
rev: 24.8.0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
- repo: https://github.com/PyCQA/bandit
|
||||||
|
rev: 1.7.9
|
||||||
|
hooks:
|
||||||
|
- id: bandit
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
{
|
|
||||||
"semi": true,
|
|
||||||
"singleQuote": true,
|
|
||||||
"trailingComma": "es5",
|
|
||||||
"printWidth": 80,
|
|
||||||
"tabWidth": 2,
|
|
||||||
"useTabs": false
|
|
||||||
}
|
|
||||||
46
Dockerfile
46
Dockerfile
@@ -41,8 +41,25 @@ if url:
|
|||||||
finally:
|
finally:
|
||||||
sock.close()
|
sock.close()
|
||||||
PY
|
PY
|
||||||
apt-get update
|
APT_PROXY_CONFIG=/etc/apt/apt.conf.d/01proxy
|
||||||
apt-get install -y --no-install-recommends build-essential gcc libpq-dev
|
|
||||||
|
apt_update_with_fallback() {
|
||||||
|
if ! apt-get update; then
|
||||||
|
rm -f "$APT_PROXY_CONFIG"
|
||||||
|
apt-get update
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
apt_install_with_fallback() {
|
||||||
|
if ! apt-get install -y --no-install-recommends "$@"; then
|
||||||
|
rm -f "$APT_PROXY_CONFIG"
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y --no-install-recommends "$@"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
apt_update_with_fallback
|
||||||
|
apt_install_with_fallback build-essential gcc libpq-dev
|
||||||
pip install --upgrade pip
|
pip install --upgrade pip
|
||||||
pip wheel --no-deps --wheel-dir /wheels -r requirements.txt
|
pip wheel --no-deps --wheel-dir /wheels -r requirements.txt
|
||||||
apt-get purge -y --auto-remove build-essential gcc
|
apt-get purge -y --auto-remove build-essential gcc
|
||||||
@@ -88,8 +105,25 @@ if url:
|
|||||||
finally:
|
finally:
|
||||||
sock.close()
|
sock.close()
|
||||||
PY
|
PY
|
||||||
apt-get update
|
APT_PROXY_CONFIG=/etc/apt/apt.conf.d/01proxy
|
||||||
apt-get install -y --no-install-recommends libpq5
|
|
||||||
|
apt_update_with_fallback() {
|
||||||
|
if ! apt-get update; then
|
||||||
|
rm -f "$APT_PROXY_CONFIG"
|
||||||
|
apt-get update
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
apt_install_with_fallback() {
|
||||||
|
if ! apt-get install -y --no-install-recommends "$@"; then
|
||||||
|
rm -f "$APT_PROXY_CONFIG"
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y --no-install-recommends "$@"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
apt_update_with_fallback
|
||||||
|
apt_install_with_fallback libpq5
|
||||||
rm -rf /var/lib/apt/lists/*
|
rm -rf /var/lib/apt/lists/*
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
@@ -108,4 +142,6 @@ USER appuser
|
|||||||
|
|
||||||
EXPOSE 8003
|
EXPOSE 8003
|
||||||
|
|
||||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]
|
ENTRYPOINT ["uvicorn"]
|
||||||
|
|
||||||
|
CMD ["main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]
|
||||||
|
|||||||
@@ -8,4 +8,6 @@ The system is designed to help mining companies make informed decisions by simul
|
|||||||
|
|
||||||
## Documentation & quickstart
|
## Documentation & quickstart
|
||||||
|
|
||||||
This repository contains only code. See detailed developer and architecture documentation in the [Docs](https://git.allucanget.biz/allucanget/calminer-docs) repository.
|
- Detailed developer, architecture, and operations guides live in the companion [calminer-docs](../calminer-docs/) repository. Please see the [README](../calminer-docs/README.md) there for instructions.
|
||||||
|
- For a local run, create a `.env` (see `.env.example`), install requirements, then execute `python -m scripts.init_db` followed by `uvicorn main:app --reload`. The initializer is safe to rerun and seeds demo data automatically.
|
||||||
|
- To wipe and recreate the schema in development, run `CALMINER_ENV=development python -m scripts.reset_db` before invoking the initializer again.
|
||||||
|
|||||||
124
changelog.md
Normal file
124
changelog.md
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2025-11-15
|
||||||
|
|
||||||
|
- Fixed dev container setup by reviewing logs, identifying mount errors, implementing fixes, and validating the configuration.
|
||||||
|
|
||||||
|
## 2025-11-14
|
||||||
|
|
||||||
|
- Completed Coolify deployment automation with workflow and documentation.
|
||||||
|
- Improved build workflow for registry authentication and tagging.
|
||||||
|
- Updated production compose and added deployment guidance.
|
||||||
|
- Added optional Kubernetes deployment toggle.
|
||||||
|
|
||||||
|
## 2025-11-13
|
||||||
|
|
||||||
|
- Aligned UI styles and ensured accessibility.
|
||||||
|
- Restructured navigation under project-scenario-calculation hierarchy.
|
||||||
|
- Reorganized documentation for better structure.
|
||||||
|
- Refactored navigation sidebar with database-driven data.
|
||||||
|
- Migrated sidebar rendering to API endpoint.
|
||||||
|
- Created templates for data import and export.
|
||||||
|
- Updated relationships for projects, scenarios, and profitability.
|
||||||
|
- Enhanced scenario frontend templates with project context.
|
||||||
|
- Scoped profitability calculator to scenario level.
|
||||||
|
- Added navigation links for opex planner.
|
||||||
|
- Documented opex planner features.
|
||||||
|
- Integrated opex calculations with persistence and tests.
|
||||||
|
- Implemented capex calculations end-to-end.
|
||||||
|
- Added basic profitability calculations.
|
||||||
|
- Developed reporting endpoints and templates.
|
||||||
|
- Integrated charting for visualizations.
|
||||||
|
- Performed manual testing of capex planner.
|
||||||
|
- Added unit tests for opex service.
|
||||||
|
- Added integration tests for opex.
|
||||||
|
|
||||||
|
## 2025-11-12
|
||||||
|
|
||||||
|
- Fixed reporting dashboard error by correcting route reference.
|
||||||
|
- Completed navigation validation by adding missing routes and templates for various pages.
|
||||||
|
- Fixed template rendering error with URL objects.
|
||||||
|
- Integrated charting for interactive visualizations.
|
||||||
|
- Verified local application startup and routes.
|
||||||
|
- Fixed docker-compose configuration.
|
||||||
|
- Verified deployment pipeline.
|
||||||
|
- Documented data models.
|
||||||
|
- Updated performance model to clear warnings.
|
||||||
|
- Replaced migration system with simpler initializer.
|
||||||
|
- Removed hardcoded secrets from tests.
|
||||||
|
- Centralized security scanning config.
|
||||||
|
- Fixed admin setup with migration.
|
||||||
|
- Resolved code style warnings.
|
||||||
|
- Enhanced deploy logging.
|
||||||
|
- Fixed CI template issue.
|
||||||
|
- Added SQLite database support.
|
||||||
|
|
||||||
|
## 2025-11-11
|
||||||
|
|
||||||
|
- Combined old migration files into one initial schema.
|
||||||
|
- Added base routing to redirect users to login or dashboard.
|
||||||
|
- Added end-to-end tests for login flow.
|
||||||
|
- Updated templates to use logo image consistently.
|
||||||
|
- Centralized currency validation across the app.
|
||||||
|
- Updated services to show friendly error messages.
|
||||||
|
- Linked projects to pricing settings.
|
||||||
|
- Bootstrapped pricing settings at startup.
|
||||||
|
- Extended pricing support with persisted data.
|
||||||
|
- Added financial helpers for NPV, IRR, payback.
|
||||||
|
- Documented financial metrics.
|
||||||
|
- Implemented Monte Carlo simulation engine.
|
||||||
|
- Cleaned up reporting contexts.
|
||||||
|
- Consolidated migration history.
|
||||||
|
- Added migration script and updated entrypoint.
|
||||||
|
- Configured test coverage.
|
||||||
|
- Standardized colors and typography.
|
||||||
|
- Improved navigation with chevron buttons.
|
||||||
|
- Established test suites with coverage.
|
||||||
|
- Configured CI pipelines for tests and security.
|
||||||
|
- Added deployment automation with Docker and Kubernetes.
|
||||||
|
- Completed monitoring instrumentation.
|
||||||
|
- Implemented performance monitoring.
|
||||||
|
- Added metric storage and endpoints.
|
||||||
|
- Created middleware for metrics.
|
||||||
|
- Extended monitoring router.
|
||||||
|
- Added migration for metrics table.
|
||||||
|
- Completed concurrent testing.
|
||||||
|
- Implemented deployment automation.
|
||||||
|
- Set up Kubernetes manifests.
|
||||||
|
- Configured CI/CD workflows.
|
||||||
|
- Documented deployment processes.
|
||||||
|
- Validated deployment setup.
|
||||||
|
|
||||||
|
## 2025-11-10
|
||||||
|
|
||||||
|
- Added tests for guard dependencies.
|
||||||
|
- Added integration tests for authorization.
|
||||||
|
- Implemented admin bootstrap settings.
|
||||||
|
- Retired old RBAC plan document.
|
||||||
|
- Completed authentication and RBAC features.
|
||||||
|
- Documented import/export field mappings.
|
||||||
|
- Added import service for CSV/Excel.
|
||||||
|
- Expanded import workflow with previews and commits.
|
||||||
|
- Added audit logging for imports/exports.
|
||||||
|
|
||||||
|
## 2025-11-09
|
||||||
|
|
||||||
|
- Captured implementation status and roadmap.
|
||||||
|
- Added core database models and migration setup.
|
||||||
|
- Introduced repository helpers for data operations.
|
||||||
|
- Added tests for repository behaviors.
|
||||||
|
- Exposed CRUD APIs for projects and scenarios.
|
||||||
|
- Connected routers to HTML views.
|
||||||
|
- Implemented client-side enhancements.
|
||||||
|
- Added scenario comparison validator.
|
||||||
|
- Delivered new dashboard experience.
|
||||||
|
- Extended repositories with utilities.
|
||||||
|
- Updated detail pages with new visuals.
|
||||||
|
- Fixed route registration issues.
|
||||||
|
- Added end-to-end tests for lifecycles.
|
||||||
|
- Updated template responses.
|
||||||
|
- Introduced security utilities.
|
||||||
|
- Added authentication routes.
|
||||||
|
- Implemented session middleware.
|
||||||
|
- Delivered seeding utilities.
|
||||||
|
- Secured routers with RBAC.
|
||||||
1
config/__init__.py
Normal file
1
config/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Configuration package."""
|
||||||
@@ -11,12 +11,21 @@ def _build_database_url() -> str:
|
|||||||
"""Construct the SQLAlchemy database URL from granular environment vars.
|
"""Construct the SQLAlchemy database URL from granular environment vars.
|
||||||
|
|
||||||
Falls back to `DATABASE_URL` for backward compatibility.
|
Falls back to `DATABASE_URL` for backward compatibility.
|
||||||
|
Supports SQLite when CALMINER_USE_SQLITE is set.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
legacy_url = os.environ.get("DATABASE_URL", "")
|
legacy_url = os.environ.get("DATABASE_URL", "")
|
||||||
if legacy_url and legacy_url.strip() != "":
|
if legacy_url and legacy_url.strip() != "":
|
||||||
return legacy_url
|
return legacy_url
|
||||||
|
|
||||||
|
use_sqlite = os.environ.get("CALMINER_USE_SQLITE", "").lower() in ("true", "1", "yes")
|
||||||
|
if use_sqlite:
|
||||||
|
# Use SQLite database
|
||||||
|
db_path = os.environ.get("DATABASE_PATH", "./data/calminer.db")
|
||||||
|
# Ensure the directory exists
|
||||||
|
os.makedirs(os.path.dirname(db_path), exist_ok=True)
|
||||||
|
return f"sqlite:///{db_path}"
|
||||||
|
|
||||||
driver = os.environ.get("DATABASE_DRIVER", "postgresql")
|
driver = os.environ.get("DATABASE_DRIVER", "postgresql")
|
||||||
host = os.environ.get("DATABASE_HOST")
|
host = os.environ.get("DATABASE_HOST")
|
||||||
port = os.environ.get("DATABASE_PORT", "5432")
|
port = os.environ.get("DATABASE_PORT", "5432")
|
||||||
@@ -54,7 +63,15 @@ def _build_database_url() -> str:
|
|||||||
DATABASE_URL = _build_database_url()
|
DATABASE_URL = _build_database_url()
|
||||||
|
|
||||||
engine = create_engine(DATABASE_URL, echo=True, future=True)
|
engine = create_engine(DATABASE_URL, echo=True, future=True)
|
||||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
# Avoid expiring ORM objects on commit so that objects returned from UnitOfWork
|
||||||
|
# remain usable for the duration of the request cycle without causing
|
||||||
|
# DetachedInstanceError when accessed after the session commits.
|
||||||
|
SessionLocal = sessionmaker(
|
||||||
|
autocommit=False,
|
||||||
|
autoflush=False,
|
||||||
|
bind=engine,
|
||||||
|
expire_on_commit=False,
|
||||||
|
)
|
||||||
Base = declarative_base()
|
Base = declarative_base()
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
233
config/settings.py
Normal file
233
config/settings.py
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import timedelta
|
||||||
|
from functools import lru_cache
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from services.pricing import PricingMetadata
|
||||||
|
|
||||||
|
from services.security import JWTSettings
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
|
||||||
|
class AdminBootstrapSettings:
|
||||||
|
"""Default administrator bootstrap configuration."""
|
||||||
|
|
||||||
|
email: str
|
||||||
|
username: str
|
||||||
|
password: str
|
||||||
|
roles: tuple[str, ...]
|
||||||
|
force_reset: bool
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
|
||||||
|
class SessionSettings:
|
||||||
|
"""Cookie and header configuration for session token transport."""
|
||||||
|
|
||||||
|
access_cookie_name: str
|
||||||
|
refresh_cookie_name: str
|
||||||
|
cookie_secure: bool
|
||||||
|
cookie_domain: Optional[str]
|
||||||
|
cookie_path: str
|
||||||
|
header_name: str
|
||||||
|
header_prefix: str
|
||||||
|
allow_header_fallback: bool
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
|
||||||
|
class Settings:
|
||||||
|
"""Application configuration sourced from environment variables."""
|
||||||
|
|
||||||
|
jwt_secret_key: str = "change-me"
|
||||||
|
jwt_algorithm: str = "HS256"
|
||||||
|
jwt_access_token_minutes: int = 15
|
||||||
|
jwt_refresh_token_days: int = 7
|
||||||
|
session_access_cookie_name: str = "calminer_access_token"
|
||||||
|
session_refresh_cookie_name: str = "calminer_refresh_token"
|
||||||
|
session_cookie_secure: bool = False
|
||||||
|
session_cookie_domain: Optional[str] = None
|
||||||
|
session_cookie_path: str = "/"
|
||||||
|
session_header_name: str = "Authorization"
|
||||||
|
session_header_prefix: str = "Bearer"
|
||||||
|
session_allow_header_fallback: bool = True
|
||||||
|
admin_email: str = "admin@calminer.local"
|
||||||
|
admin_username: str = "admin"
|
||||||
|
admin_password: str = "ChangeMe123!"
|
||||||
|
admin_roles: tuple[str, ...] = ("admin",)
|
||||||
|
admin_force_reset: bool = False
|
||||||
|
pricing_default_payable_pct: float = 100.0
|
||||||
|
pricing_default_currency: str | None = "USD"
|
||||||
|
pricing_moisture_threshold_pct: float = 8.0
|
||||||
|
pricing_moisture_penalty_per_pct: float = 0.0
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_environment(cls) -> "Settings":
|
||||||
|
"""Construct settings from environment variables."""
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
jwt_secret_key=os.getenv("CALMINER_JWT_SECRET", "change-me"),
|
||||||
|
jwt_algorithm=os.getenv("CALMINER_JWT_ALGORITHM", "HS256"),
|
||||||
|
jwt_access_token_minutes=cls._int_from_env(
|
||||||
|
"CALMINER_JWT_ACCESS_MINUTES", 15
|
||||||
|
),
|
||||||
|
jwt_refresh_token_days=cls._int_from_env(
|
||||||
|
"CALMINER_JWT_REFRESH_DAYS", 7
|
||||||
|
),
|
||||||
|
session_access_cookie_name=os.getenv(
|
||||||
|
"CALMINER_SESSION_ACCESS_COOKIE", "calminer_access_token"
|
||||||
|
),
|
||||||
|
session_refresh_cookie_name=os.getenv(
|
||||||
|
"CALMINER_SESSION_REFRESH_COOKIE", "calminer_refresh_token"
|
||||||
|
),
|
||||||
|
session_cookie_secure=cls._bool_from_env(
|
||||||
|
"CALMINER_SESSION_COOKIE_SECURE", False
|
||||||
|
),
|
||||||
|
session_cookie_domain=os.getenv("CALMINER_SESSION_COOKIE_DOMAIN"),
|
||||||
|
session_cookie_path=os.getenv("CALMINER_SESSION_COOKIE_PATH", "/"),
|
||||||
|
session_header_name=os.getenv(
|
||||||
|
"CALMINER_SESSION_HEADER_NAME", "Authorization"
|
||||||
|
),
|
||||||
|
session_header_prefix=os.getenv(
|
||||||
|
"CALMINER_SESSION_HEADER_PREFIX", "Bearer"
|
||||||
|
),
|
||||||
|
session_allow_header_fallback=cls._bool_from_env(
|
||||||
|
"CALMINER_SESSION_ALLOW_HEADER_FALLBACK", True
|
||||||
|
),
|
||||||
|
admin_email=os.getenv(
|
||||||
|
"CALMINER_SEED_ADMIN_EMAIL", "admin@calminer.local"
|
||||||
|
),
|
||||||
|
admin_username=os.getenv(
|
||||||
|
"CALMINER_SEED_ADMIN_USERNAME", "admin"
|
||||||
|
),
|
||||||
|
admin_password=os.getenv(
|
||||||
|
"CALMINER_SEED_ADMIN_PASSWORD", "ChangeMe123!"
|
||||||
|
),
|
||||||
|
admin_roles=cls._parse_admin_roles(
|
||||||
|
os.getenv("CALMINER_SEED_ADMIN_ROLES")
|
||||||
|
),
|
||||||
|
admin_force_reset=cls._bool_from_env(
|
||||||
|
"CALMINER_SEED_FORCE", False
|
||||||
|
),
|
||||||
|
pricing_default_payable_pct=cls._float_from_env(
|
||||||
|
"CALMINER_PRICING_DEFAULT_PAYABLE_PCT", 100.0
|
||||||
|
),
|
||||||
|
pricing_default_currency=cls._optional_str(
|
||||||
|
"CALMINER_PRICING_DEFAULT_CURRENCY", "USD"
|
||||||
|
),
|
||||||
|
pricing_moisture_threshold_pct=cls._float_from_env(
|
||||||
|
"CALMINER_PRICING_MOISTURE_THRESHOLD_PCT", 8.0
|
||||||
|
),
|
||||||
|
pricing_moisture_penalty_per_pct=cls._float_from_env(
|
||||||
|
"CALMINER_PRICING_MOISTURE_PENALTY_PER_PCT", 0.0
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _int_from_env(name: str, default: int) -> int:
|
||||||
|
raw_value = os.getenv(name)
|
||||||
|
if raw_value is None:
|
||||||
|
return default
|
||||||
|
try:
|
||||||
|
return int(raw_value)
|
||||||
|
except ValueError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _bool_from_env(name: str, default: bool) -> bool:
|
||||||
|
raw_value = os.getenv(name)
|
||||||
|
if raw_value is None:
|
||||||
|
return default
|
||||||
|
lowered = raw_value.strip().lower()
|
||||||
|
if lowered in {"1", "true", "yes", "on"}:
|
||||||
|
return True
|
||||||
|
if lowered in {"0", "false", "no", "off"}:
|
||||||
|
return False
|
||||||
|
return default
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _parse_admin_roles(raw_value: str | None) -> tuple[str, ...]:
|
||||||
|
if not raw_value:
|
||||||
|
return ("admin",)
|
||||||
|
parts = [segment.strip()
|
||||||
|
for segment in raw_value.split(",") if segment.strip()]
|
||||||
|
if "admin" not in parts:
|
||||||
|
parts.insert(0, "admin")
|
||||||
|
seen: set[str] = set()
|
||||||
|
ordered: list[str] = []
|
||||||
|
for role_name in parts:
|
||||||
|
if role_name not in seen:
|
||||||
|
ordered.append(role_name)
|
||||||
|
seen.add(role_name)
|
||||||
|
return tuple(ordered)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _float_from_env(name: str, default: float) -> float:
|
||||||
|
raw_value = os.getenv(name)
|
||||||
|
if raw_value is None:
|
||||||
|
return default
|
||||||
|
try:
|
||||||
|
return float(raw_value)
|
||||||
|
except ValueError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _optional_str(name: str, default: str | None = None) -> str | None:
|
||||||
|
raw_value = os.getenv(name)
|
||||||
|
if raw_value is None or raw_value.strip() == "":
|
||||||
|
return default
|
||||||
|
return raw_value.strip()
|
||||||
|
|
||||||
|
def jwt_settings(self) -> JWTSettings:
|
||||||
|
"""Build runtime JWT settings compatible with token helpers."""
|
||||||
|
|
||||||
|
return JWTSettings(
|
||||||
|
secret_key=self.jwt_secret_key,
|
||||||
|
algorithm=self.jwt_algorithm,
|
||||||
|
access_token_ttl=timedelta(minutes=self.jwt_access_token_minutes),
|
||||||
|
refresh_token_ttl=timedelta(days=self.jwt_refresh_token_days),
|
||||||
|
)
|
||||||
|
|
||||||
|
def session_settings(self) -> SessionSettings:
|
||||||
|
"""Provide transport configuration for session tokens."""
|
||||||
|
|
||||||
|
return SessionSettings(
|
||||||
|
access_cookie_name=self.session_access_cookie_name,
|
||||||
|
refresh_cookie_name=self.session_refresh_cookie_name,
|
||||||
|
cookie_secure=self.session_cookie_secure,
|
||||||
|
cookie_domain=self.session_cookie_domain,
|
||||||
|
cookie_path=self.session_cookie_path,
|
||||||
|
header_name=self.session_header_name,
|
||||||
|
header_prefix=self.session_header_prefix,
|
||||||
|
allow_header_fallback=self.session_allow_header_fallback,
|
||||||
|
)
|
||||||
|
|
||||||
|
def admin_bootstrap_settings(self) -> AdminBootstrapSettings:
|
||||||
|
"""Return configured admin bootstrap settings."""
|
||||||
|
|
||||||
|
return AdminBootstrapSettings(
|
||||||
|
email=self.admin_email,
|
||||||
|
username=self.admin_username,
|
||||||
|
password=self.admin_password,
|
||||||
|
roles=self.admin_roles,
|
||||||
|
force_reset=self.admin_force_reset,
|
||||||
|
)
|
||||||
|
|
||||||
|
def pricing_metadata(self) -> PricingMetadata:
|
||||||
|
"""Build pricing metadata defaults."""
|
||||||
|
|
||||||
|
return PricingMetadata(
|
||||||
|
default_payable_pct=self.pricing_default_payable_pct,
|
||||||
|
default_currency=self.pricing_default_currency,
|
||||||
|
moisture_threshold_pct=self.pricing_moisture_threshold_pct,
|
||||||
|
moisture_penalty_per_pct=self.pricing_moisture_penalty_per_pct,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def get_settings() -> Settings:
|
||||||
|
"""Return cached application settings."""
|
||||||
|
|
||||||
|
return Settings.from_environment()
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
# Copy this file to config/setup_production.env and replace values with production secrets
|
|
||||||
|
|
||||||
# Container image and runtime configuration
|
|
||||||
CALMINER_IMAGE=registry.example.com/calminer/api:latest
|
|
||||||
CALMINER_DOMAIN=calminer.example.com
|
|
||||||
TRAEFIK_ACME_EMAIL=ops@example.com
|
|
||||||
CALMINER_API_PORT=8000
|
|
||||||
UVICORN_WORKERS=4
|
|
||||||
UVICORN_LOG_LEVEL=info
|
|
||||||
CALMINER_NETWORK=calminer_backend
|
|
||||||
API_LIMIT_CPUS=1.0
|
|
||||||
API_LIMIT_MEMORY=1g
|
|
||||||
API_RESERVATION_MEMORY=512m
|
|
||||||
TRAEFIK_LIMIT_CPUS=0.5
|
|
||||||
TRAEFIK_LIMIT_MEMORY=512m
|
|
||||||
POSTGRES_LIMIT_CPUS=1.0
|
|
||||||
POSTGRES_LIMIT_MEMORY=2g
|
|
||||||
POSTGRES_RESERVATION_MEMORY=1g
|
|
||||||
|
|
||||||
# Application database connection
|
|
||||||
DATABASE_DRIVER=postgresql+psycopg2
|
|
||||||
DATABASE_HOST=production-db.internal
|
|
||||||
DATABASE_PORT=5432
|
|
||||||
DATABASE_NAME=calminer
|
|
||||||
DATABASE_USER=calminer_app
|
|
||||||
DATABASE_PASSWORD=ChangeMe123!
|
|
||||||
DATABASE_SCHEMA=public
|
|
||||||
|
|
||||||
# Optional consolidated SQLAlchemy URL (overrides granular settings when set)
|
|
||||||
# DATABASE_URL=postgresql+psycopg2://calminer_app:ChangeMe123!@production-db.internal:5432/calminer
|
|
||||||
|
|
||||||
# Superuser credentials used by scripts/setup_database.py for migrations/seed data
|
|
||||||
DATABASE_SUPERUSER=postgres
|
|
||||||
DATABASE_SUPERUSER_PASSWORD=ChangeMeSuper123!
|
|
||||||
DATABASE_SUPERUSER_DB=postgres
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
# Sample environment configuration for staging deployment
|
|
||||||
DATABASE_HOST=staging-db.internal
|
|
||||||
DATABASE_PORT=5432
|
|
||||||
DATABASE_NAME=calminer_staging
|
|
||||||
DATABASE_USER=calminer_app
|
|
||||||
DATABASE_PASSWORD=<app-password>
|
|
||||||
|
|
||||||
# Admin connection used for provisioning database and roles
|
|
||||||
DATABASE_SUPERUSER=postgres
|
|
||||||
DATABASE_SUPERUSER_PASSWORD=<admin-password>
|
|
||||||
DATABASE_SUPERUSER_DB=postgres
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
# Sample environment configuration for running scripts/setup_database.py against a test instance
|
|
||||||
DATABASE_DRIVER=postgresql
|
|
||||||
DATABASE_HOST=postgres
|
|
||||||
DATABASE_PORT=5432
|
|
||||||
DATABASE_NAME=calminer_test
|
|
||||||
DATABASE_USER=calminer_test
|
|
||||||
DATABASE_PASSWORD=<test-password>
|
|
||||||
# optional: specify schema if different from 'public'
|
|
||||||
#DATABASE_SCHEMA=public
|
|
||||||
|
|
||||||
# Admin connection used for provisioning database and roles
|
|
||||||
DATABASE_SUPERUSER=postgres
|
|
||||||
DATABASE_SUPERUSER_PASSWORD=<superuser-password>
|
|
||||||
DATABASE_SUPERUSER_DB=postgres
|
|
||||||
400
dependencies.py
Normal file
400
dependencies.py
Normal file
@@ -0,0 +1,400 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Callable, Iterable, Generator
|
||||||
|
|
||||||
|
from fastapi import Depends, HTTPException, Request, status
|
||||||
|
|
||||||
|
from config.settings import Settings, get_settings
|
||||||
|
from models import Project, Role, Scenario, User
|
||||||
|
from services.authorization import (
|
||||||
|
ensure_project_access as ensure_project_access_helper,
|
||||||
|
ensure_scenario_access as ensure_scenario_access_helper,
|
||||||
|
ensure_scenario_in_project as ensure_scenario_in_project_helper,
|
||||||
|
)
|
||||||
|
from services.exceptions import AuthorizationError, EntityNotFoundError
|
||||||
|
from services.security import JWTSettings
|
||||||
|
from services.session import (
|
||||||
|
AuthSession,
|
||||||
|
SessionStrategy,
|
||||||
|
SessionTokens,
|
||||||
|
build_session_strategy,
|
||||||
|
extract_session_tokens,
|
||||||
|
)
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from services.importers import ImportIngestionService
|
||||||
|
from services.pricing import PricingMetadata
|
||||||
|
from services.navigation import NavigationService
|
||||||
|
from services.scenario_evaluation import ScenarioPricingConfig, ScenarioPricingEvaluator
|
||||||
|
from services.repositories import pricing_settings_to_metadata
|
||||||
|
|
||||||
|
|
||||||
|
def get_unit_of_work() -> Generator[UnitOfWork, None, None]:
|
||||||
|
"""FastAPI dependency yielding a unit-of-work instance."""
|
||||||
|
|
||||||
|
with UnitOfWork() as uow:
|
||||||
|
yield uow
|
||||||
|
|
||||||
|
|
||||||
|
_IMPORT_INGESTION_SERVICE = ImportIngestionService(lambda: UnitOfWork())
|
||||||
|
|
||||||
|
|
||||||
|
def get_import_ingestion_service() -> ImportIngestionService:
|
||||||
|
"""Provide singleton import ingestion service."""
|
||||||
|
|
||||||
|
return _IMPORT_INGESTION_SERVICE
|
||||||
|
|
||||||
|
|
||||||
|
def get_application_settings() -> Settings:
|
||||||
|
"""Provide cached application settings instance."""
|
||||||
|
|
||||||
|
return get_settings()
|
||||||
|
|
||||||
|
|
||||||
|
def get_pricing_metadata(
|
||||||
|
settings: Settings = Depends(get_application_settings),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> PricingMetadata:
|
||||||
|
"""Return pricing metadata defaults sourced from persisted pricing settings."""
|
||||||
|
|
||||||
|
stored = uow.get_pricing_metadata()
|
||||||
|
if stored is not None:
|
||||||
|
return stored
|
||||||
|
|
||||||
|
fallback = settings.pricing_metadata()
|
||||||
|
seed_result = uow.ensure_default_pricing_settings(metadata=fallback)
|
||||||
|
return pricing_settings_to_metadata(seed_result.settings)
|
||||||
|
|
||||||
|
|
||||||
|
def get_navigation_service(
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> NavigationService:
|
||||||
|
if not uow.navigation:
|
||||||
|
raise RuntimeError("Navigation repository is not initialised")
|
||||||
|
return NavigationService(uow.navigation)
|
||||||
|
|
||||||
|
|
||||||
|
def get_pricing_evaluator(
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
) -> ScenarioPricingEvaluator:
|
||||||
|
"""Provide a configured scenario pricing evaluator."""
|
||||||
|
|
||||||
|
return ScenarioPricingEvaluator(ScenarioPricingConfig(metadata=metadata))
|
||||||
|
|
||||||
|
|
||||||
|
def get_jwt_settings() -> JWTSettings:
|
||||||
|
"""Provide JWT runtime configuration derived from settings."""
|
||||||
|
|
||||||
|
return get_settings().jwt_settings()
|
||||||
|
|
||||||
|
|
||||||
|
def get_session_strategy(
|
||||||
|
settings: Settings = Depends(get_application_settings),
|
||||||
|
) -> SessionStrategy:
|
||||||
|
"""Yield configured session transport strategy."""
|
||||||
|
|
||||||
|
return build_session_strategy(settings.session_settings())
|
||||||
|
|
||||||
|
|
||||||
|
def get_session_tokens(
|
||||||
|
request: Request,
|
||||||
|
strategy: SessionStrategy = Depends(get_session_strategy),
|
||||||
|
) -> SessionTokens:
|
||||||
|
"""Extract raw session tokens from the incoming request."""
|
||||||
|
|
||||||
|
existing = getattr(request.state, "auth_session", None)
|
||||||
|
if isinstance(existing, AuthSession):
|
||||||
|
return existing.tokens
|
||||||
|
|
||||||
|
tokens = extract_session_tokens(request, strategy)
|
||||||
|
request.state.auth_session = AuthSession(tokens=tokens)
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
|
||||||
|
def get_auth_session(
|
||||||
|
request: Request,
|
||||||
|
tokens: SessionTokens = Depends(get_session_tokens),
|
||||||
|
) -> AuthSession:
|
||||||
|
"""Provide authentication session context for the current request."""
|
||||||
|
|
||||||
|
existing = getattr(request.state, "auth_session", None)
|
||||||
|
if isinstance(existing, AuthSession):
|
||||||
|
return existing
|
||||||
|
|
||||||
|
if tokens.is_empty:
|
||||||
|
session = AuthSession.anonymous()
|
||||||
|
else:
|
||||||
|
session = AuthSession(tokens=tokens)
|
||||||
|
request.state.auth_session = session
|
||||||
|
return session
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_user(
|
||||||
|
session: AuthSession = Depends(get_auth_session),
|
||||||
|
) -> User | None:
|
||||||
|
"""Return the current authenticated user if present."""
|
||||||
|
|
||||||
|
return session.user
|
||||||
|
|
||||||
|
|
||||||
|
def require_current_user(
|
||||||
|
session: AuthSession = Depends(get_auth_session),
|
||||||
|
) -> User:
|
||||||
|
"""Ensure that a request is authenticated and return the user context."""
|
||||||
|
|
||||||
|
if session.user is None or session.tokens.is_empty:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Authentication required.",
|
||||||
|
)
|
||||||
|
return session.user
|
||||||
|
|
||||||
|
|
||||||
|
def require_authenticated_user(
|
||||||
|
user: User = Depends(require_current_user),
|
||||||
|
) -> User:
|
||||||
|
"""Ensure the current user account is active."""
|
||||||
|
|
||||||
|
if not user.is_active:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="User account is disabled.",
|
||||||
|
)
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
def require_authenticated_user_html(
|
||||||
|
request: Request,
|
||||||
|
session: AuthSession = Depends(get_auth_session),
|
||||||
|
) -> User:
|
||||||
|
"""HTML-aware authenticated dependency that redirects anonymous sessions."""
|
||||||
|
|
||||||
|
user = session.user
|
||||||
|
if user is None or session.tokens.is_empty:
|
||||||
|
login_url = str(request.url_for("auth.login_form"))
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
headers={"Location": login_url},
|
||||||
|
)
|
||||||
|
|
||||||
|
if not user.is_active:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="User account is disabled.",
|
||||||
|
)
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
def _user_role_names(user: User) -> set[str]:
|
||||||
|
roles: Iterable[Role] = getattr(user, "roles", []) or []
|
||||||
|
return {role.name for role in roles}
|
||||||
|
|
||||||
|
|
||||||
|
def require_roles(*roles: str) -> Callable[[User], User]:
|
||||||
|
"""Dependency factory enforcing membership in one of the given roles."""
|
||||||
|
|
||||||
|
required = tuple(role.strip() for role in roles if role.strip())
|
||||||
|
if not required:
|
||||||
|
raise ValueError("require_roles requires at least one role name")
|
||||||
|
|
||||||
|
def _dependency(user: User = Depends(require_authenticated_user)) -> User:
|
||||||
|
if user.is_superuser:
|
||||||
|
return user
|
||||||
|
|
||||||
|
role_names = _user_role_names(user)
|
||||||
|
if not any(role in role_names for role in required):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="Insufficient permissions for this action.",
|
||||||
|
)
|
||||||
|
return user
|
||||||
|
|
||||||
|
return _dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_any_role(*roles: str) -> Callable[[User], User]:
|
||||||
|
"""Alias of require_roles for readability in some contexts."""
|
||||||
|
|
||||||
|
return require_roles(*roles)
|
||||||
|
|
||||||
|
|
||||||
|
def require_roles_html(*roles: str) -> Callable[[Request], User]:
|
||||||
|
"""Ensure user is authenticated for HTML responses; redirect anonymous to login."""
|
||||||
|
|
||||||
|
required = tuple(role.strip() for role in roles if role.strip())
|
||||||
|
if not required:
|
||||||
|
raise ValueError("require_roles_html requires at least one role name")
|
||||||
|
|
||||||
|
def _dependency(
|
||||||
|
request: Request,
|
||||||
|
session: AuthSession = Depends(get_auth_session),
|
||||||
|
) -> User:
|
||||||
|
user = session.user
|
||||||
|
if user is None:
|
||||||
|
login_url = str(request.url_for("auth.login_form"))
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
headers={"Location": login_url},
|
||||||
|
)
|
||||||
|
|
||||||
|
if user.is_superuser:
|
||||||
|
return user
|
||||||
|
|
||||||
|
role_names = _user_role_names(user)
|
||||||
|
if not any(role in role_names for role in required):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="Insufficient permissions for this action.",
|
||||||
|
)
|
||||||
|
return user
|
||||||
|
|
||||||
|
return _dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_any_role_html(*roles: str) -> Callable[[Request], User]:
    """Readable alias: delegate straight to :func:`require_roles_html`."""
    dependency = require_roles_html(*roles)
    return dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_project_resource(
    *,
    require_manage: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int], Project]:
    """Dependency factory that resolves a project with authorization checks.

    Service-layer failures are translated to HTTP errors: an unknown project
    becomes 404, an inaccessible one becomes 403.
    """

    def _dependency(
        project_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Project:
        try:
            project = ensure_project_access_helper(
                uow,
                project_id=project_id,
                user=user,
                require_manage=require_manage,
            )
        except EntityNotFoundError as exc:
            # Unknown project -> 404 carrying the service-layer message.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
            ) from exc
        except AuthorizationError as exc:
            # Known project the caller may not touch -> 403.
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)
            ) from exc
        return project

    return _dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_scenario_resource(
    *,
    require_manage: bool = False,
    with_children: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int], Scenario]:
    """Dependency factory that resolves a scenario with authorization checks.

    Service-layer failures are translated to HTTP errors: an unknown scenario
    becomes 404, an inaccessible one becomes 403.
    """

    def _dependency(
        scenario_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Scenario:
        try:
            scenario = ensure_scenario_access_helper(
                uow,
                scenario_id=scenario_id,
                user=user,
                require_manage=require_manage,
                with_children=with_children,
            )
        except EntityNotFoundError as exc:
            # Missing scenario surfaces as 404 with the service message.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
            ) from exc
        except AuthorizationError as exc:
            # Present but inaccessible scenario surfaces as 403.
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)
            ) from exc
        return scenario

    return _dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_project_scenario_resource(
    *,
    require_manage: bool = False,
    with_children: bool = False,
    user_dependency: Callable[..., User] = require_authenticated_user,
) -> Callable[[int, int], Scenario]:
    """Dependency factory ensuring a scenario belongs to the given project and is accessible.

    Service-layer failures are translated to HTTP errors: a missing
    project/scenario pairing becomes 404, an inaccessible one becomes 403.
    """

    def _dependency(
        project_id: int,
        scenario_id: int,
        user: User = Depends(user_dependency),
        uow: UnitOfWork = Depends(get_unit_of_work),
    ) -> Scenario:
        try:
            scenario = ensure_scenario_in_project_helper(
                uow,
                project_id=project_id,
                scenario_id=scenario_id,
                user=user,
                require_manage=require_manage,
                with_children=with_children,
            )
        except EntityNotFoundError as exc:
            # Unknown project/scenario combination -> 404.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
            ) from exc
        except AuthorizationError as exc:
            # Pairing exists but the caller lacks access -> 403.
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN, detail=str(exc)
            ) from exc
        return scenario

    return _dependency
|
||||||
|
|
||||||
|
|
||||||
|
def require_project_resource_html(
    *, require_manage: bool = False
) -> Callable[[int], Project]:
    """HTML-aware project loader that redirects anonymous sessions.

    Identical to :func:`require_project_resource` except anonymous users get
    a login redirect (via the HTML user dependency) instead of a 401.
    """
    return require_project_resource(
        user_dependency=require_authenticated_user_html,
        require_manage=require_manage,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def require_scenario_resource_html(
    *,
    require_manage: bool = False,
    with_children: bool = False,
) -> Callable[[int], Scenario]:
    """HTML-aware scenario loader that redirects anonymous sessions.

    Identical to :func:`require_scenario_resource` except anonymous users get
    a login redirect (via the HTML user dependency) instead of a 401.
    """
    return require_scenario_resource(
        user_dependency=require_authenticated_user_html,
        require_manage=require_manage,
        with_children=with_children,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def require_project_scenario_resource_html(
    *,
    require_manage: bool = False,
    with_children: bool = False,
) -> Callable[[int, int], Scenario]:
    """HTML-aware project-scenario loader redirecting anonymous sessions.

    Identical to :func:`require_project_scenario_resource` except anonymous
    users get a login redirect (via the HTML user dependency) instead of 401.
    """
    return require_project_scenario_resource(
        user_dependency=require_authenticated_user_html,
        require_manage=require_manage,
        with_children=with_children,
    )
|
||||||
59
docker-compose.override.yml
Normal file
59
docker-compose.override.yml
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
# Development overrides applied automatically on top of docker-compose.yml.
# NOTE(review): the top-level `version` key is obsolete in Compose v2 and
# only emits a warning — safe to drop in a future change.
version: "3.8"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        # Optional apt proxy/cache for faster local image builds.
        APT_CACHE_URL: ${APT_CACHE_URL:-}
    environment:
      - ENVIRONMENT=development
      - DEBUG=true
      - LOG_LEVEL=DEBUG
      # Override database to use local postgres service.
      # Hard-coded credentials are acceptable here only because this file
      # targets local development; production uses external variables.
      - DATABASE_HOST=postgres
      - DATABASE_PORT=5432
      - DATABASE_USER=calminer
      - DATABASE_PASSWORD=calminer_password
      - DATABASE_NAME=calminer_db
      - DATABASE_DRIVER=postgresql
      # Development-specific settings
      - CALMINER_EXPORT_MAX_ROWS=1000
      - CALMINER_IMPORT_MAX_ROWS=10000
    volumes:
      # Mount source code for live reloading (if using --reload)
      - .:/app:ro
      # Override logs volume to local for easier access
      - ./logs:/app/logs
    ports:
      - "8003:8003"
    # Override command for development with reload.
    # NOTE(review): these look like uvicorn arguments — presumably the image
    # entrypoint is `uvicorn`; confirm against the Dockerfile.
    command:
      [
        "main:app",
        "--host",
        "0.0.0.0",
        "--port",
        "8003",
        "--reload",
        "--workers",
        "1",
      ]
    depends_on:
      - postgres
    restart: unless-stopped

  postgres:
    environment:
      - POSTGRES_USER=calminer
      - POSTGRES_PASSWORD=calminer_password
      - POSTGRES_DB=calminer_db
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  postgres_data:
|
||||||
73
docker-compose.prod.yml
Normal file
73
docker-compose.prod.yml
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# Production overlay: pulls the published image and requires database
# credentials from the environment (no defaults for secrets).
# NOTE(review): the top-level `version` key is obsolete in Compose v2.
version: "3.8"

services:
  app:
    image: git.allucanget.biz/allucanget/calminer:latest
    environment:
      - ENVIRONMENT=production
      - DEBUG=false
      - LOG_LEVEL=WARNING
      # Database configuration - must be provided externally
      - DATABASE_HOST=${DATABASE_HOST}
      - DATABASE_PORT=${DATABASE_PORT:-5432}
      - DATABASE_USER=${DATABASE_USER}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_NAME=${DATABASE_NAME}
      - DATABASE_DRIVER=postgresql
      # Production-specific settings
      - CALMINER_EXPORT_MAX_ROWS=100000
      - CALMINER_IMPORT_MAX_ROWS=100000
      - CALMINER_EXPORT_METADATA=true
      - CALMINER_IMPORT_STAGING_TTL=3600
    ports:
      - "8003:8003"
    depends_on:
      # Gate app start on the postgres healthcheck below.
      postgres:
        condition: service_healthy
    restart: unless-stopped
    # Production health checks
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8003/health"]
      interval: 60s
      timeout: 30s
      retries: 5
      start_period: 60s
    # Resource limits for production
    deploy:
      resources:
        limits:
          cpus: "1.0"
          memory: 1G
        reservations:
          cpus: "0.5"
          memory: 512M

  postgres:
    environment:
      - POSTGRES_USER=${DATABASE_USER}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_DB=${DATABASE_NAME}
    ports:
      # NOTE(review): publishing 5432 on the host exposes the production
      # database to the host network — confirm this is intended; usually the
      # internal compose network suffices.
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    # Production postgres health check
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DATABASE_USER} -d ${DATABASE_NAME}"]
      interval: 60s
      timeout: 30s
      retries: 5
      start_period: 60s
    # Resource limits for postgres
    deploy:
      resources:
        limits:
          cpus: "1.0"
          memory: 2G
        reservations:
          cpus: "0.5"
          memory: 1G

volumes:
  postgres_data:
|
||||||
62
docker-compose.staging.yml
Normal file
62
docker-compose.staging.yml
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
# Staging overlay: builds from source, defaults most database settings but
# still requires DATABASE_PASSWORD from the environment.
# NOTE(review): the top-level `version` key is obsolete in Compose v2.
version: "3.8"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        APT_CACHE_URL: ${APT_CACHE_URL:-}
    environment:
      - ENVIRONMENT=staging
      - DEBUG=false
      - LOG_LEVEL=INFO
      # Database configuration - can be overridden by external env
      - DATABASE_HOST=${DATABASE_HOST:-postgres}
      - DATABASE_PORT=${DATABASE_PORT:-5432}
      - DATABASE_USER=${DATABASE_USER:-calminer}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_NAME=${DATABASE_NAME:-calminer_db}
      - DATABASE_DRIVER=postgresql
      # Staging-specific settings
      - CALMINER_EXPORT_MAX_ROWS=50000
      - CALMINER_IMPORT_MAX_ROWS=50000
      - CALMINER_EXPORT_METADATA=true
      - CALMINER_IMPORT_STAGING_TTL=600
    ports:
      - "8003:8003"
    depends_on:
      - postgres
    restart: unless-stopped
    # Health check for staging
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8003/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  postgres:
    environment:
      - POSTGRES_USER=${DATABASE_USER:-calminer}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
      - POSTGRES_DB=${DATABASE_NAME:-calminer_db}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    # Health check for postgres
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "pg_isready -U ${DATABASE_USER:-calminer} -d ${DATABASE_NAME:-calminer_db}",
        ]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

volumes:
  postgres_data:
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
version: "3.8"
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
app:
|
app:
|
||||||
build:
|
build:
|
||||||
@@ -8,11 +6,13 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "8003:8003"
|
- "8003:8003"
|
||||||
environment:
|
environment:
|
||||||
- DATABASE_HOST=postgres
|
# Environment-specific variables should be set in override files
|
||||||
- DATABASE_PORT=5432
|
- ENVIRONMENT=${ENVIRONMENT:-production}
|
||||||
- DATABASE_USER=calminer
|
- DATABASE_HOST=${DATABASE_HOST:-postgres}
|
||||||
- DATABASE_PASSWORD=calminer_password
|
- DATABASE_PORT=${DATABASE_PORT:-5432}
|
||||||
- DATABASE_NAME=calminer_db
|
- DATABASE_USER=${DATABASE_USER}
|
||||||
|
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
|
||||||
|
- DATABASE_NAME=${DATABASE_NAME}
|
||||||
- DATABASE_DRIVER=postgresql
|
- DATABASE_DRIVER=postgresql
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
- postgres
|
||||||
@@ -23,9 +23,9 @@ services:
|
|||||||
postgres:
|
postgres:
|
||||||
image: postgres:17
|
image: postgres:17
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_USER=calminer
|
- POSTGRES_USER=${DATABASE_USER}
|
||||||
- POSTGRES_PASSWORD=calminer_password
|
- POSTGRES_PASSWORD=${DATABASE_PASSWORD}
|
||||||
- POSTGRES_DB=calminer_db
|
- POSTGRES_DB=${DATABASE_NAME}
|
||||||
ports:
|
ports:
|
||||||
- "5432:5432"
|
- "5432:5432"
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
14
k8s/configmap.yaml
Normal file
14
k8s/configmap.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# Non-secret runtime configuration for the CalMiner app; consumed via
# envFrom in k8s/deployment.yaml. Secrets live in k8s/secret.yaml.
apiVersion: v1
kind: ConfigMap
metadata:
  name: calminer-config
data:
  # DATABASE_HOST matches the headless Service in k8s/postgres-service.yaml.
  DATABASE_HOST: "calminer-db"
  DATABASE_PORT: "5432"
  DATABASE_USER: "calminer"
  DATABASE_NAME: "calminer_db"
  DATABASE_DRIVER: "postgresql"
  CALMINER_EXPORT_MAX_ROWS: "10000"
  CALMINER_EXPORT_METADATA: "true"
  CALMINER_IMPORT_STAGING_TTL: "300"
  CALMINER_IMPORT_MAX_ROWS: "50000"
|
||||||
54
k8s/deployment.yaml
Normal file
54
k8s/deployment.yaml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# Deployment for the CalMiner application: 3 replicas, config from the
# calminer-config ConfigMap plus calminer-secrets Secret, health probes on
# the /health endpoint, and an init container that blocks until postgres
# answers pg_isready.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: calminer-app
  labels:
    app: calminer
spec:
  replicas: 3
  selector:
    matchLabels:
      app: calminer
  template:
    metadata:
      labels:
        app: calminer
    spec:
      containers:
        - name: calminer
          # NOTE(review): placeholder registry + mutable `latest` tag — pin a
          # digest or version tag before production rollout.
          image: registry.example.com/calminer:latest
          ports:
            - containerPort: 8003
          envFrom:
            - configMapRef:
                name: calminer-config
            - secretRef:
                name: calminer-secrets
          resources:
            requests:
              memory: "256Mi"
              cpu: "250m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          livenessProbe:
            httpGet:
              path: /health
              port: 8003
            initialDelaySeconds: 30
            periodSeconds: 10
          readinessProbe:
            httpGet:
              path: /health
              port: 8003
            initialDelaySeconds: 5
            periodSeconds: 5
      initContainers:
        # Delay app start until the database service accepts connections.
        - name: wait-for-db
          image: postgres:17
          command:
            [
              "sh",
              "-c",
              "until pg_isready -h calminer-db -p 5432; do echo waiting for database; sleep 2; done;",
            ]
|
||||||
18
k8s/ingress.yaml
Normal file
18
k8s/ingress.yaml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Ingress routing calminer.example.com to the calminer-service ClusterIP
# service (port 80 -> container 8003 via k8s/service.yaml).
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: calminer-ingress
  annotations:
    nginx.ingress.kubernetes.io/rewrite-target: /
spec:
  rules:
    # NOTE(review): placeholder hostname and no TLS section — add a tls:
    # block / cert-manager annotations before exposing publicly.
    - host: calminer.example.com
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: calminer-service
                port:
                  number: 80
|
||||||
13
k8s/postgres-service.yaml
Normal file
13
k8s/postgres-service.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Headless Service giving the postgres StatefulSet a stable DNS name
# (calminer-db), referenced by the app ConfigMap and the wait-for-db init
# container.
apiVersion: v1
kind: Service
metadata:
  name: calminer-db
  labels:
    app: calminer-db
spec:
  selector:
    app: calminer-db
  ports:
    - port: 5432
      targetPort: 5432
  clusterIP: None # Headless service for StatefulSet
|
||||||
48
k8s/postgres.yaml
Normal file
48
k8s/postgres.yaml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# Single-replica postgres StatefulSet with a 10Gi PVC per pod; the database
# password comes from the calminer-secrets Secret rather than plain env.
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: calminer-db
spec:
  serviceName: calminer-db
  replicas: 1
  selector:
    matchLabels:
      app: calminer-db
  template:
    metadata:
      labels:
        app: calminer-db
    spec:
      containers:
        - name: postgres
          image: postgres:17
          ports:
            - containerPort: 5432
          env:
            # User/DB names mirror the values in k8s/configmap.yaml; keep
            # them in sync manually (they are duplicated, not referenced).
            - name: POSTGRES_USER
              value: "calminer"
            - name: POSTGRES_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: calminer-secrets
                  key: DATABASE_PASSWORD
            - name: POSTGRES_DB
              value: "calminer_db"
          resources:
            requests:
              memory: "256Mi"
              cpu: "250m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          volumeMounts:
            - name: postgres-storage
              mountPath: /var/lib/postgresql/data
  volumeClaimTemplates:
    - metadata:
        name: postgres-storage
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 10Gi
|
||||||
8
k8s/secret.yaml
Normal file
8
k8s/secret.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Secret consumed by the app Deployment (envFrom) and the postgres
# StatefulSet (secretKeyRef).
# NOTE(review): base64 is encoding, not encryption — these default
# credentials are effectively committed in plain text. Rotate them and
# manage this Secret out-of-band (sealed-secrets / external secret manager)
# before any real deployment.
apiVersion: v1
kind: Secret
metadata:
  name: calminer-secrets
type: Opaque
data:
  DATABASE_PASSWORD: Y2FsbWluZXJfcGFzc3dvcmQ= # base64 encoded 'calminer_password'
  CALMINER_SEED_ADMIN_PASSWORD: Q2hhbmdlTWUxMjMh # base64 encoded 'ChangeMe123!'
|
||||||
14
k8s/service.yaml
Normal file
14
k8s/service.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# ClusterIP Service fronting the app pods; the Ingress targets this service
# on port 80, which forwards to the container's 8003.
apiVersion: v1
kind: Service
metadata:
  name: calminer-service
  labels:
    app: calminer
spec:
  selector:
    app: calminer
  ports:
    - port: 80
      targetPort: 8003
      protocol: TCP
  type: ClusterIP
|
||||||
125
main.py
125
main.py
@@ -1,28 +1,88 @@
|
|||||||
from routes.distributions import router as distributions_router
|
import logging
|
||||||
from routes.ui import router as ui_router
|
from contextlib import asynccontextmanager
|
||||||
from routes.parameters import router as parameters_router
|
|
||||||
from typing import Awaitable, Callable
|
from typing import Awaitable, Callable
|
||||||
|
|
||||||
from fastapi import FastAPI, Request, Response
|
from fastapi import FastAPI, Request, Response
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
|
from fastapi.responses import FileResponse
|
||||||
|
|
||||||
|
from config.settings import get_settings
|
||||||
|
from middleware.auth_session import AuthSessionMiddleware
|
||||||
|
from middleware.metrics import MetricsMiddleware
|
||||||
from middleware.validation import validate_json
|
from middleware.validation import validate_json
|
||||||
from config.database import Base, engine
|
from routes.auth import router as auth_router
|
||||||
|
from routes.dashboard import router as dashboard_router
|
||||||
|
from routes.calculations import router as calculations_router
|
||||||
|
from routes.imports import router as imports_router
|
||||||
|
from routes.exports import router as exports_router
|
||||||
|
from routes.projects import router as projects_router
|
||||||
|
from routes.reports import router as reports_router
|
||||||
from routes.scenarios import router as scenarios_router
|
from routes.scenarios import router as scenarios_router
|
||||||
from routes.costs import router as costs_router
|
from routes.ui import router as ui_router
|
||||||
from routes.consumption import router as consumption_router
|
from routes.navigation import router as navigation_router
|
||||||
from routes.production import router as production_router
|
from monitoring import router as monitoring_router
|
||||||
from routes.equipment import router as equipment_router
|
from services.bootstrap import bootstrap_admin, bootstrap_pricing_settings
|
||||||
from routes.reporting import router as reporting_router
|
from scripts.init_db import init_db as init_db_script
|
||||||
from routes.currencies import router as currencies_router
|
|
||||||
from routes.simulations import router as simulations_router
|
|
||||||
from routes.maintenance import router as maintenance_router
|
|
||||||
from routes.settings import router as settings_router
|
|
||||||
from routes.users import router as users_router
|
|
||||||
|
|
||||||
# Initialize database schema
|
logger = logging.getLogger(__name__)
|
||||||
Base.metadata.create_all(bind=engine)
|
|
||||||
|
|
||||||
app = FastAPI()
|
|
||||||
|
async def _bootstrap_startup() -> None:
|
||||||
|
settings = get_settings()
|
||||||
|
admin_settings = settings.admin_bootstrap_settings()
|
||||||
|
pricing_metadata = settings.pricing_metadata()
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
init_db_script()
|
||||||
|
except Exception:
|
||||||
|
logger.exception(
|
||||||
|
"DB initializer failed; continuing to bootstrap (non-fatal)")
|
||||||
|
|
||||||
|
role_result, admin_result = bootstrap_admin(settings=admin_settings)
|
||||||
|
pricing_result = bootstrap_pricing_settings(metadata=pricing_metadata)
|
||||||
|
logger.info(
|
||||||
|
"Admin bootstrap completed: roles=%s created=%s updated=%s rotated=%s assigned=%s",
|
||||||
|
role_result.ensured,
|
||||||
|
admin_result.created_user,
|
||||||
|
admin_result.updated_user,
|
||||||
|
admin_result.password_rotated,
|
||||||
|
admin_result.roles_granted,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
seed = pricing_result.seed
|
||||||
|
slug = getattr(seed.settings, "slug", None) if seed and getattr(
|
||||||
|
seed, "settings", None) else None
|
||||||
|
created = getattr(seed, "created", None)
|
||||||
|
updated_fields = getattr(seed, "updated_fields", None)
|
||||||
|
impurity_upserts = getattr(seed, "impurity_upserts", None)
|
||||||
|
logger.info(
|
||||||
|
"Pricing settings bootstrap completed: slug=%s created=%s updated_fields=%s impurity_upserts=%s projects_assigned=%s",
|
||||||
|
slug,
|
||||||
|
created,
|
||||||
|
updated_fields,
|
||||||
|
impurity_upserts,
|
||||||
|
pricing_result.projects_assigned,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
logger.info(
|
||||||
|
"Pricing settings bootstrap completed (partial): projects_assigned=%s",
|
||||||
|
pricing_result.projects_assigned,
|
||||||
|
)
|
||||||
|
except Exception: # pragma: no cover - defensive logging
|
||||||
|
logger.exception(
|
||||||
|
"Failed to bootstrap administrator or pricing settings")
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def app_lifespan(_: FastAPI):
|
||||||
|
await _bootstrap_startup()
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
app = FastAPI(lifespan=app_lifespan)
|
||||||
|
|
||||||
|
app.add_middleware(AuthSessionMiddleware)
|
||||||
|
app.add_middleware(MetricsMiddleware)
|
||||||
|
|
||||||
|
|
||||||
@app.middleware("http")
|
@app.middleware("http")
|
||||||
@@ -37,20 +97,23 @@ async def health() -> dict[str, str]:
|
|||||||
return {"status": "ok"}
|
return {"status": "ok"}
|
||||||
|
|
||||||
|
|
||||||
app.mount("/static", StaticFiles(directory="static"), name="static")
|
@app.get("/favicon.ico", include_in_schema=False)
|
||||||
|
async def favicon() -> Response:
|
||||||
|
static_directory = "static"
|
||||||
|
favicon_img = "favicon.ico"
|
||||||
|
return FileResponse(f"{static_directory}/{favicon_img}")
|
||||||
|
|
||||||
# Include API routers
|
|
||||||
|
app.include_router(dashboard_router)
|
||||||
|
app.include_router(calculations_router)
|
||||||
|
app.include_router(auth_router)
|
||||||
|
app.include_router(imports_router)
|
||||||
|
app.include_router(exports_router)
|
||||||
|
app.include_router(projects_router)
|
||||||
app.include_router(scenarios_router)
|
app.include_router(scenarios_router)
|
||||||
app.include_router(parameters_router)
|
app.include_router(reports_router)
|
||||||
app.include_router(distributions_router)
|
|
||||||
app.include_router(costs_router)
|
|
||||||
app.include_router(consumption_router)
|
|
||||||
app.include_router(simulations_router)
|
|
||||||
app.include_router(production_router)
|
|
||||||
app.include_router(equipment_router)
|
|
||||||
app.include_router(maintenance_router)
|
|
||||||
app.include_router(reporting_router)
|
|
||||||
app.include_router(currencies_router)
|
|
||||||
app.include_router(settings_router)
|
|
||||||
app.include_router(ui_router)
|
app.include_router(ui_router)
|
||||||
app.include_router(users_router)
|
app.include_router(monitoring_router)
|
||||||
|
app.include_router(navigation_router)
|
||||||
|
|
||||||
|
app.mount("/static", StaticFiles(directory="static"), name="static")
|
||||||
|
|||||||
218
middleware/auth_session.py
Normal file
218
middleware/auth_session.py
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Callable, Iterable, Optional
|
||||||
|
|
||||||
|
from fastapi import Request, Response
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
|
||||||
|
from starlette.types import ASGIApp
|
||||||
|
|
||||||
|
from config.settings import Settings, get_settings
|
||||||
|
from sqlalchemy.orm.exc import DetachedInstanceError
|
||||||
|
from models import User
|
||||||
|
from monitoring.metrics import ACTIVE_CONNECTIONS
|
||||||
|
from services.exceptions import EntityNotFoundError
|
||||||
|
from services.security import (
|
||||||
|
JWTSettings,
|
||||||
|
TokenDecodeError,
|
||||||
|
TokenError,
|
||||||
|
TokenExpiredError,
|
||||||
|
TokenTypeMismatchError,
|
||||||
|
create_access_token,
|
||||||
|
create_refresh_token,
|
||||||
|
decode_access_token,
|
||||||
|
decode_refresh_token,
|
||||||
|
)
|
||||||
|
from services.session import (
|
||||||
|
AuthSession,
|
||||||
|
SessionStrategy,
|
||||||
|
SessionTokens,
|
||||||
|
build_session_strategy,
|
||||||
|
clear_session_cookies,
|
||||||
|
extract_session_tokens,
|
||||||
|
set_session_cookies,
|
||||||
|
)
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
|
||||||
|
_AUTH_SCOPE = "auth"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
class _ResolutionResult:
    """Bundle returned by ``AuthSessionMiddleware._resolve_session``.

    Carries the per-request auth state plus the cookie strategy and JWT
    settings required to write refreshed tokens back onto the response.
    """

    # Resolved (possibly anonymous) session attached to request.state.
    session: AuthSession
    # Cookie naming/flags strategy used to read and write session cookies.
    strategy: SessionStrategy
    # JWT configuration used for decoding and for setting cookie lifetimes.
    jwt_settings: JWTSettings
|
||||||
|
|
||||||
|
|
||||||
|
class AuthSessionMiddleware(BaseHTTPMiddleware):
|
||||||
|
"""Resolve authenticated users from session cookies and refresh tokens."""
|
||||||
|
|
||||||
|
_active_sessions: int = 0
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
app: ASGIApp,
|
||||||
|
*,
|
||||||
|
settings_provider: Callable[[], Settings] = get_settings,
|
||||||
|
unit_of_work_factory: Callable[[], UnitOfWork] = UnitOfWork,
|
||||||
|
refresh_scopes: Iterable[str] | None = None,
|
||||||
|
) -> None:
|
||||||
|
super().__init__(app)
|
||||||
|
self._settings_provider = settings_provider
|
||||||
|
self._unit_of_work_factory = unit_of_work_factory
|
||||||
|
self._refresh_scopes = tuple(
|
||||||
|
refresh_scopes) if refresh_scopes else (_AUTH_SCOPE,)
|
||||||
|
|
||||||
|
async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
|
||||||
|
resolved = self._resolve_session(request)
|
||||||
|
|
||||||
|
# Track active sessions for authenticated users
|
||||||
|
try:
|
||||||
|
user_active = bool(resolved.session.user and getattr(
|
||||||
|
resolved.session.user, "is_active", False))
|
||||||
|
except DetachedInstanceError:
|
||||||
|
user_active = False
|
||||||
|
|
||||||
|
if user_active:
|
||||||
|
AuthSessionMiddleware._active_sessions += 1
|
||||||
|
ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)
|
||||||
|
|
||||||
|
response: Response | None = None
|
||||||
|
try:
|
||||||
|
response = await call_next(request)
|
||||||
|
return response
|
||||||
|
finally:
|
||||||
|
# Always decrement the active sessions counter if we incremented it.
|
||||||
|
if user_active:
|
||||||
|
AuthSessionMiddleware._active_sessions = max(
|
||||||
|
0, AuthSessionMiddleware._active_sessions - 1)
|
||||||
|
ACTIVE_CONNECTIONS.set(AuthSessionMiddleware._active_sessions)
|
||||||
|
|
||||||
|
# Only apply session cookies if a response was produced by downstream
|
||||||
|
# application. If an exception occurred before a response was created
|
||||||
|
# we avoid raising another error here.
|
||||||
|
import logging
|
||||||
|
if response is not None:
|
||||||
|
try:
|
||||||
|
self._apply_session(response, resolved)
|
||||||
|
except Exception:
|
||||||
|
logging.getLogger(__name__).exception(
|
||||||
|
"Failed to apply session cookies to response"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logging.getLogger(__name__).debug(
|
||||||
|
"AuthSessionMiddleware: no response produced by downstream app (response is None)"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _resolve_session(self, request: Request) -> _ResolutionResult:
|
||||||
|
settings = self._settings_provider()
|
||||||
|
jwt_settings = settings.jwt_settings()
|
||||||
|
strategy = build_session_strategy(settings.session_settings())
|
||||||
|
|
||||||
|
tokens = extract_session_tokens(request, strategy)
|
||||||
|
session = AuthSession(tokens=tokens)
|
||||||
|
request.state.auth_session = session
|
||||||
|
|
||||||
|
if tokens.access_token:
|
||||||
|
if self._try_access_token(session, tokens, jwt_settings):
|
||||||
|
return _ResolutionResult(session=session, strategy=strategy, jwt_settings=jwt_settings)
|
||||||
|
|
||||||
|
if tokens.refresh_token:
|
||||||
|
self._try_refresh_token(
|
||||||
|
session, tokens.refresh_token, jwt_settings)
|
||||||
|
|
||||||
|
return _ResolutionResult(session=session, strategy=strategy, jwt_settings=jwt_settings)
|
||||||
|
|
||||||
|
def _try_access_token(
|
||||||
|
self,
|
||||||
|
session: AuthSession,
|
||||||
|
tokens: SessionTokens,
|
||||||
|
jwt_settings: JWTSettings,
|
||||||
|
) -> bool:
|
||||||
|
try:
|
||||||
|
payload = decode_access_token(
|
||||||
|
tokens.access_token or "", jwt_settings)
|
||||||
|
except TokenExpiredError:
|
||||||
|
return False
|
||||||
|
except (TokenDecodeError, TokenTypeMismatchError, TokenError):
|
||||||
|
session.mark_cleared()
|
||||||
|
return False
|
||||||
|
|
||||||
|
user = self._load_user(payload.sub)
|
||||||
|
if not user or not user.is_active or _AUTH_SCOPE not in payload.scopes:
|
||||||
|
session.mark_cleared()
|
||||||
|
return False
|
||||||
|
|
||||||
|
session.user = user
|
||||||
|
session.scopes = tuple(payload.scopes)
|
||||||
|
session.set_role_slugs(role.name for role in getattr(user, "roles", []) if role)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _try_refresh_token(
|
||||||
|
self,
|
||||||
|
session: AuthSession,
|
||||||
|
refresh_token: str,
|
||||||
|
jwt_settings: JWTSettings,
|
||||||
|
) -> None:
|
||||||
|
try:
|
||||||
|
payload = decode_refresh_token(refresh_token, jwt_settings)
|
||||||
|
except (TokenExpiredError, TokenDecodeError, TokenTypeMismatchError, TokenError):
|
||||||
|
session.mark_cleared()
|
||||||
|
return
|
||||||
|
|
||||||
|
user = self._load_user(payload.sub)
|
||||||
|
if not user or not user.is_active or not self._is_refresh_scope_allowed(payload.scopes):
|
||||||
|
session.mark_cleared()
|
||||||
|
return
|
||||||
|
|
||||||
|
session.user = user
|
||||||
|
session.scopes = tuple(payload.scopes)
|
||||||
|
session.set_role_slugs(role.name for role in getattr(user, "roles", []) if role)
|
||||||
|
|
||||||
|
access_token = create_access_token(
|
||||||
|
str(user.id),
|
||||||
|
jwt_settings,
|
||||||
|
scopes=payload.scopes,
|
||||||
|
)
|
||||||
|
new_refresh = create_refresh_token(
|
||||||
|
str(user.id),
|
||||||
|
jwt_settings,
|
||||||
|
scopes=payload.scopes,
|
||||||
|
)
|
||||||
|
session.issue_tokens(access_token=access_token,
|
||||||
|
refresh_token=new_refresh)
|
||||||
|
|
||||||
|
def _is_refresh_scope_allowed(self, scopes: Iterable[str]) -> bool:
    """Return True when *scopes* shares at least one entry with the configured refresh scopes."""
    return not set(scopes).isdisjoint(self._refresh_scopes)
|
||||||
|
|
||||||
|
def _load_user(self, subject: str) -> Optional[User]:
    """Resolve a JWT subject claim to a User, or None.

    Returns None when the subject is not a stringified integer id, when
    the user repository is unavailable, or when no matching row exists.
    """
    try:
        user_id = int(subject)
    except ValueError:
        # Subject claims are expected to be numeric ids; anything else is invalid.
        return None

    with self._unit_of_work_factory() as uow:
        if not uow.users:
            return None
        try:
            # Eager-load roles so session role slugs can be derived later.
            return uow.users.get(user_id, with_roles=True)
        except EntityNotFoundError:
            return None
|
||||||
|
|
||||||
|
def _apply_session(self, response: Response, resolved: _ResolutionResult) -> None:
    """Reflect the resolved auth session onto the outgoing response cookies."""
    session = resolved.session

    # Clearing takes precedence: a session flagged for clearing drops cookies.
    if session.clear_cookies:
        clear_session_cookies(response, resolved.strategy)
        return

    if not session.issued_access_token:
        # Nothing newly issued — leave existing cookies untouched.
        return

    set_session_cookies(
        response,
        access_token=session.issued_access_token,
        # Fall back to the previously presented refresh token when none was reissued.
        refresh_token=session.issued_refresh_token or session.tokens.refresh_token,
        strategy=resolved.strategy,
        jwt_settings=resolved.jwt_settings,
    )
|
||||||
58
middleware/metrics.py
Normal file
58
middleware/metrics.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import time
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
from fastapi import Request, Response
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
|
||||||
|
from monitoring.metrics import observe_request
|
||||||
|
from services.metrics import get_metrics_service
|
||||||
|
|
||||||
|
|
||||||
|
class MetricsMiddleware(BaseHTTPMiddleware):
    """Records per-request metrics and queues database persistence.

    Exports an in-process observation via ``observe_request`` and, when
    the route installed a background-task container on ``request.state``,
    schedules asynchronous DB storage of the same measurement.
    """

    async def dispatch(self, request: Request, call_next: Callable[[Request], Response]) -> Response:
        """Time the downstream handler and record the outcome.

        Uses ``time.perf_counter()`` (monotonic) rather than ``time.time()``
        so measured durations cannot go negative or jump when the wall
        clock is adjusted (NTP, DST).
        """
        start = time.perf_counter()
        response = await call_next(request)
        duration = time.perf_counter() - start

        # Synchronous, in-memory metrics export.
        observe_request(
            method=request.method,
            endpoint=request.url.path,
            status=response.status_code,
            seconds=duration,
        )

        # Persist to the database off the request path, if a BackgroundTasks
        # container was placed on request.state by upstream code.
        background_tasks = getattr(request.state, "background_tasks", None)
        if background_tasks:
            background_tasks.add_task(
                store_request_metric,
                method=request.method,
                endpoint=request.url.path,
                status_code=response.status_code,
                duration_seconds=duration,
            )

        return response
|
||||||
|
|
||||||
|
|
||||||
|
async def store_request_metric(
    method: str, endpoint: str, status_code: int, duration_seconds: float
) -> None:
    """Store a single HTTP request metric in the database.

    Best-effort: storage failures are logged and suppressed so that
    metric persistence can never fail the request that triggered it.
    (The original handler said "log error" but silently passed; it now
    actually logs.)
    """
    import logging

    try:
        service = get_metrics_service()
        service.store_metric(
            metric_name="http_request",
            value=duration_seconds,
            labels={"method": method, "endpoint": endpoint,
                    "status": status_code},
            endpoint=endpoint,
            method=method,
            status_code=status_code,
            duration_seconds=duration_seconds,
        )
    except Exception:
        # Log the failure but never propagate it to the request path.
        logging.getLogger(__name__).exception(
            "Failed to store request metric for %s %s", method, endpoint
        )
|
||||||
@@ -10,10 +10,14 @@ async def validate_json(
|
|||||||
) -> Response:
|
) -> Response:
|
||||||
# Only validate JSON for requests with a body
|
# Only validate JSON for requests with a body
|
||||||
if request.method in ("POST", "PUT", "PATCH"):
|
if request.method in ("POST", "PUT", "PATCH"):
|
||||||
try:
|
# Only attempt JSON parsing when the client indicates a JSON content type.
|
||||||
# attempt to parse json body
|
content_type = (request.headers.get("content-type") or "").lower()
|
||||||
await request.json()
|
if "json" in content_type:
|
||||||
except Exception:
|
try:
|
||||||
raise HTTPException(status_code=400, detail="Invalid JSON payload")
|
# attempt to parse json body
|
||||||
|
await request.json()
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400, detail="Invalid JSON payload")
|
||||||
response = await call_next(request)
|
response = await call_next(request)
|
||||||
return response
|
return response
|
||||||
|
|||||||
@@ -1,10 +1,72 @@
|
|||||||
"""
|
"""Database models and shared metadata for the CalMiner domain."""
|
||||||
models package initializer. Import key models so they're registered
|
|
||||||
with the shared Base.metadata when the package is imported by tests.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from . import application_setting # noqa: F401
|
from .financial_input import FinancialInput
|
||||||
from . import currency # noqa: F401
|
from .metadata import (
|
||||||
from . import role # noqa: F401
|
COST_BUCKET_METADATA,
|
||||||
from . import user # noqa: F401
|
RESOURCE_METADATA,
|
||||||
from . import theme_setting # noqa: F401
|
STOCHASTIC_VARIABLE_METADATA,
|
||||||
|
ResourceDescriptor,
|
||||||
|
StochasticVariableDescriptor,
|
||||||
|
)
|
||||||
|
from .performance_metric import PerformanceMetric
|
||||||
|
from .pricing_settings import (
|
||||||
|
PricingImpuritySettings,
|
||||||
|
PricingMetalSettings,
|
||||||
|
PricingSettings,
|
||||||
|
)
|
||||||
|
from .enums import (
|
||||||
|
CostBucket,
|
||||||
|
DistributionType,
|
||||||
|
FinancialCategory,
|
||||||
|
MiningOperationType,
|
||||||
|
ResourceType,
|
||||||
|
ScenarioStatus,
|
||||||
|
StochasticVariable,
|
||||||
|
)
|
||||||
|
from .project import Project
|
||||||
|
from .scenario import Scenario
|
||||||
|
from .simulation_parameter import SimulationParameter
|
||||||
|
from .user import Role, User, UserRole, password_context
|
||||||
|
from .navigation import NavigationGroup, NavigationLink
|
||||||
|
|
||||||
|
from .profitability_snapshot import ProjectProfitability, ScenarioProfitability
|
||||||
|
from .capex_snapshot import ProjectCapexSnapshot, ScenarioCapexSnapshot
|
||||||
|
from .opex_snapshot import (
|
||||||
|
ProjectOpexSnapshot,
|
||||||
|
ScenarioOpexSnapshot,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FinancialCategory",
|
||||||
|
"FinancialInput",
|
||||||
|
"MiningOperationType",
|
||||||
|
"Project",
|
||||||
|
"ProjectProfitability",
|
||||||
|
"ProjectCapexSnapshot",
|
||||||
|
"ProjectOpexSnapshot",
|
||||||
|
"PricingSettings",
|
||||||
|
"PricingMetalSettings",
|
||||||
|
"PricingImpuritySettings",
|
||||||
|
"Scenario",
|
||||||
|
"ScenarioProfitability",
|
||||||
|
"ScenarioCapexSnapshot",
|
||||||
|
"ScenarioOpexSnapshot",
|
||||||
|
"ScenarioStatus",
|
||||||
|
"DistributionType",
|
||||||
|
"SimulationParameter",
|
||||||
|
"ResourceType",
|
||||||
|
"CostBucket",
|
||||||
|
"StochasticVariable",
|
||||||
|
"RESOURCE_METADATA",
|
||||||
|
"COST_BUCKET_METADATA",
|
||||||
|
"STOCHASTIC_VARIABLE_METADATA",
|
||||||
|
"ResourceDescriptor",
|
||||||
|
"StochasticVariableDescriptor",
|
||||||
|
"User",
|
||||||
|
"Role",
|
||||||
|
"UserRole",
|
||||||
|
"password_context",
|
||||||
|
"PerformanceMetric",
|
||||||
|
"NavigationGroup",
|
||||||
|
"NavigationLink",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,38 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from sqlalchemy import Boolean, DateTime, String, Text
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
|
||||||
from sqlalchemy.sql import func
|
|
||||||
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationSetting(Base):
    """Key/value application configuration entry persisted in the database."""

    __tablename__ = "application_setting"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    # Unique configuration key used for lookups.
    key: Mapped[str] = mapped_column(String(128), unique=True, nullable=False)
    # Raw stored value; interpretation is governed by value_type.
    value: Mapped[str] = mapped_column(Text, nullable=False)
    # Declared type of `value` (default "string"); coercion presumably
    # happens in calling code — confirm against the settings service.
    value_type: Mapped[str] = mapped_column(
        String(32), nullable=False, default="string"
    )
    # Grouping bucket for settings (default "general").
    category: Mapped[str] = mapped_column(
        String(32), nullable=False, default="general"
    )
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # NOTE(review): False presumably locks the setting against edits — verify in callers.
    is_editable: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    # Refreshed on every UPDATE via onupdate=func.now().
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )

    def __repr__(self) -> str:
        return f"<ApplicationSetting key={self.key} category={self.category}>"
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
from sqlalchemy import event, text
|
|
||||||
from sqlalchemy import Column, Integer, Float, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Capex(Base):
    """Capital-expenditure line item attached to a scenario."""

    __tablename__ = "capex"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    # Resolved automatically before insert/update by the _resolve_currency
    # event handlers when only a currency code was supplied.
    currency_id = Column(Integer, ForeignKey("currency.id"), nullable=False)

    scenario = relationship("Scenario", back_populates="capex_items")
    currency = relationship("Currency", back_populates="capex_items")

    def __repr__(self):
        return (
            f"<Capex id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} currency_id={self.currency_id}>"
        )

    @property
    def currency_code(self) -> "str | None":
        # Convenience accessor; None when no currency row is linked yet.
        return self.currency.code if self.currency else None

    @currency_code.setter
    def currency_code(self, value: str) -> None:
        # store pending code so application code or migrations can pick it up
        # (normalised to upper-case, defaulting to "USD" for falsy input)
        setattr(
            self, "_currency_code_pending", (value or "USD").strip().upper()
        )
|
|
||||||
|
|
||||||
|
|
||||||
# SQLAlchemy event handlers to ensure currency_id is set before insert/update
|
|
||||||
|
|
||||||
|
|
||||||
def _resolve_currency(mapper, connection, target):
    """SQLAlchemy before_insert/before_update hook: ensure currency_id is set.

    Resolves the pending currency code (set via Capex.currency_code) to a
    row id, inserting a new currency row if the code is unknown.
    """
    # If currency_id already set, nothing to do
    if getattr(target, "currency_id", None):
        return
    code = getattr(target, "_currency_code_pending", None) or "USD"
    # Try to find existing currency id
    row = connection.execute(
        text("SELECT id FROM currency WHERE code = :code"), {"code": code}
    ).fetchone()
    if row:
        cid = row[0]
    else:
        # Insert new currency and attempt to get lastrowid
        res = connection.execute(
            text(
                "INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"
            ),
            {"code": code, "name": code, "symbol": None, "active": True},
        )
        try:
            cid = res.lastrowid
        except Exception:
            # fallback: select after insert (lastrowid is dialect-dependent)
            cid = connection.execute(
                text("SELECT id FROM currency WHERE code = :code"),
                {"code": code},
            ).scalar()
    target.currency_id = cid


# SQLAlchemy event handlers to ensure currency_id is set before insert/update
event.listen(Capex, "before_insert", _resolve_currency)
event.listen(Capex, "before_update", _resolve_currency)
|
|
||||||
111
models/capex_snapshot.py
Normal file
111
models/capex_snapshot.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import JSON, DateTime, ForeignKey, Integer, Numeric, String
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .project import Project
|
||||||
|
from .scenario import Scenario
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectCapexSnapshot(Base):
    """Snapshot of aggregated capex metrics at the project level."""

    __tablename__ = "project_capex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # User who triggered the calculation; kept (NULLed) if that user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Identifier of the subsystem that produced the numbers.
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # ISO-4217 style 3-letter code — assumed; confirm against writers.
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    total_capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    contingency_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    contingency_amount: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    total_with_contingency: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Free-form JSON blob carrying the full calculation detail.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="capex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectCapexSnapshot(id={id!r}, project_id={project_id!r}, total_capex={total_capex!r})".format(
                id=self.id, project_id=self.project_id, total_capex=self.total_capex
            )
        )
|
||||||
|
|
||||||
|
|
||||||
|
class ScenarioCapexSnapshot(Base):
    """Snapshot of capex metrics for an individual scenario."""

    __tablename__ = "scenario_capex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # User who triggered the calculation; kept (NULLed) if that user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Identifier of the subsystem that produced the numbers.
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    total_capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    contingency_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    contingency_amount: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    total_with_contingency: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Free-form JSON blob carrying the full calculation detail.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="capex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ScenarioCapexSnapshot(id={id!r}, scenario_id={scenario_id!r}, total_capex={total_capex!r})".format(
                id=self.id, scenario_id=self.scenario_id, total_capex=self.total_capex
            )
        )
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, Float, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Consumption(Base):
    """Resource consumption line item attached to a scenario."""

    __tablename__ = "consumption"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    # Unit captured denormalised as name/symbol text, e.g. "litre" / "L".
    unit_name = Column(String(64), nullable=True)
    unit_symbol = Column(String(16), nullable=True)

    scenario = relationship("Scenario", back_populates="consumption_items")

    def __repr__(self):
        return (
            f"<Consumption id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} unit={self.unit_symbol or self.unit_name}>"
        )
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, String, Boolean
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Currency(Base):
    """Currency reference row keyed by its 3-letter code."""

    __tablename__ = "currency"

    id = Column(Integer, primary_key=True, index=True)
    # 3-letter code (e.g. "USD"); unique lookup key.
    code = Column(String(3), nullable=False, unique=True, index=True)
    name = Column(String(128), nullable=False)
    symbol = Column(String(8), nullable=True)
    is_active = Column(Boolean, nullable=False, default=True)

    # reverse relationships (optional)
    capex_items = relationship(
        "Capex", back_populates="currency", lazy="select"
    )
    opex_items = relationship("Opex", back_populates="currency", lazy="select")

    def __repr__(self):
        return (
            f"<Currency code={self.code} name={self.name} symbol={self.symbol}>"
        )
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, String, JSON
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Distribution(Base):
    """Named probability distribution definition."""

    __tablename__ = "distribution"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, nullable=False)
    # Distribution family as free-form text (not constrained to an enum here).
    distribution_type = Column(String, nullable=False)
    # Family-specific parameters as JSON — shape presumably depends on
    # distribution_type; confirm against the simulation code.
    parameters = Column(JSON, nullable=True)

    def __repr__(self):
        return f"<Distribution id={self.id} name={self.name} type={self.distribution_type}>"
|
|
||||||
96
models/enums.py
Normal file
96
models/enums.py
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Type
|
||||||
|
|
||||||
|
from sqlalchemy import Enum as SQLEnum
|
||||||
|
|
||||||
|
|
||||||
|
def sql_enum(enum_cls: Type[Enum], *, name: str) -> SQLEnum:
    """Build a SQLAlchemy Enum column type keyed on member *values*.

    ``values_callable`` makes the database store each member's ``.value``
    string instead of the default (the member name).
    """
    return SQLEnum(
        enum_cls,
        name=name,
        # The DB-level type is managed by migrations, not metadata.create_all.
        create_type=False,
        validate_strings=True,
        values_callable=lambda members: [m.value for m in members],
    )
|
||||||
|
|
||||||
|
|
||||||
|
class MiningOperationType(str, Enum):
    """Supported mining operation categories.

    str-subclassed so members compare equal to, and serialize as, their
    plain string values.
    """

    OPEN_PIT = "open_pit"
    UNDERGROUND = "underground"
    IN_SITU_LEACH = "in_situ_leach"
    PLACER = "placer"
    QUARRY = "quarry"
    MOUNTAINTOP_REMOVAL = "mountaintop_removal"
    # Catch-all for operation types not modelled explicitly.
    OTHER = "other"
|
||||||
|
|
||||||
|
|
||||||
|
class ScenarioStatus(str, Enum):
    """Lifecycle states for project scenarios (str-valued for serialization)."""

    DRAFT = "draft"
    ACTIVE = "active"
    ARCHIVED = "archived"
|
||||||
|
|
||||||
|
|
||||||
|
class FinancialCategory(str, Enum):
    """Enumeration of cost and revenue classifications.

    Persisted via member values (see FinancialInput's sql_enum column).
    """

    CAPITAL_EXPENDITURE = "capex"
    OPERATING_EXPENDITURE = "opex"
    REVENUE = "revenue"
    CONTINGENCY = "contingency"
    OTHER = "other"
|
||||||
|
|
||||||
|
|
||||||
|
class DistributionType(str, Enum):
    """Supported stochastic distribution families for simulations."""

    NORMAL = "normal"
    TRIANGULAR = "triangular"
    UNIFORM = "uniform"
    LOGNORMAL = "lognormal"
    # Escape hatch for user-supplied parameterizations.
    CUSTOM = "custom"
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceType(str, Enum):
    """Primary consumables and resources used in mining operations.

    Canonical units/descriptions per member live in RESOURCE_METADATA.
    """

    DIESEL = "diesel"
    ELECTRICITY = "electricity"
    WATER = "water"
    EXPLOSIVES = "explosives"
    REAGENTS = "reagents"
    LABOR = "labor"
    EQUIPMENT_HOURS = "equipment_hours"
    TAILINGS_CAPACITY = "tailings_capacity"
|
||||||
|
|
||||||
|
|
||||||
|
class CostBucket(str, Enum):
    """Granular cost buckets aligned with project accounting.

    Reporting labels per member live in COST_BUCKET_METADATA.
    """

    CAPITAL_INITIAL = "capital_initial"
    CAPITAL_SUSTAINING = "capital_sustaining"
    OPERATING_FIXED = "operating_fixed"
    OPERATING_VARIABLE = "operating_variable"
    MAINTENANCE = "maintenance"
    RECLAMATION = "reclamation"
    ROYALTIES = "royalties"
    GENERAL_ADMIN = "general_admin"
|
||||||
|
|
||||||
|
|
||||||
|
class StochasticVariable(str, Enum):
    """Domain variables that typically require probabilistic modelling.

    Units/descriptions per member live in STOCHASTIC_VARIABLE_METADATA.
    """

    ORE_GRADE = "ore_grade"
    RECOVERY_RATE = "recovery_rate"
    METAL_PRICE = "metal_price"
    OPERATING_COST = "operating_cost"
    CAPITAL_COST = "capital_cost"
    DISCOUNT_RATE = "discount_rate"
    THROUGHPUT = "throughput"
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Equipment(Base):
    """Piece of equipment registered against a scenario."""

    __tablename__ = "equipment"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    name = Column(String, nullable=False)
    description = Column(String, nullable=True)

    scenario = relationship("Scenario", back_populates="equipment_items")

    def __repr__(self):
        return f"<Equipment id={self.id} scenario_id={self.scenario_id} name={self.name}>"
|
|
||||||
62
models/financial_input.py
Normal file
62
models/financial_input.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import date, datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
Date,
|
||||||
|
DateTime,
|
||||||
|
ForeignKey,
|
||||||
|
Integer,
|
||||||
|
Numeric,
|
||||||
|
String,
|
||||||
|
Text,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
|
||||||
|
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
from .enums import CostBucket, FinancialCategory, sql_enum
|
||||||
|
from services.currency import normalise_currency
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .scenario import Scenario
|
||||||
|
|
||||||
|
|
||||||
|
class FinancialInput(Base):
    """Line-item financial assumption attached to a scenario."""

    __tablename__ = "financial_inputs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    # Stored by enum *value* (e.g. "capex"), not member name — see sql_enum.
    category: Mapped[FinancialCategory] = mapped_column(
        sql_enum(FinancialCategory, name="financialcategory"), nullable=False
    )
    cost_bucket: Mapped[CostBucket | None] = mapped_column(
        sql_enum(CostBucket, name="costbucket"), nullable=True
    )
    amount: Mapped[float] = mapped_column(Numeric(18, 2), nullable=False)
    # 3-letter code, normalised by the validator below.
    currency: Mapped[str | None] = mapped_column(String(3), nullable=True)
    effective_date: Mapped[date | None] = mapped_column(Date, nullable=True)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped["Scenario"] = relationship(
        "Scenario", back_populates="financial_inputs")

    @validates("currency")
    def _validate_currency(self, key: str, value: str | None) -> str | None:
        # Runs on every assignment to `currency` (SQLAlchemy @validates).
        return normalise_currency(value)

    def __repr__(self) -> str:  # pragma: no cover
        return f"FinancialInput(id={self.id!r}, scenario_id={self.scenario_id!r}, name={self.name!r})"
|
||||||
31
models/import_export_log.py
Normal file
31
models/import_export_log.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class ImportExportLog(Base):
    """Audit log for import and export operations."""

    __tablename__ = "import_export_logs"

    id = Column(Integer, primary_key=True, index=True)
    action = Column(String(32), nullable=False)  # preview, commit, export
    dataset = Column(String(32), nullable=False)  # projects, scenarios, etc.
    status = Column(String(16), nullable=False)  # success, failure
    filename = Column(String(255), nullable=True)
    row_count = Column(Integer, nullable=True)
    # Free-form detail (e.g. error text) — exact content set by callers.
    detail = Column(Text, nullable=True)
    # Acting user, if any.
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True)
    created_at = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )

    def __repr__(self) -> str:  # pragma: no cover
        return (
            f"ImportExportLog(id={self.id}, action={self.action}, "
            f"dataset={self.dataset}, status={self.status})"
        )
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
from sqlalchemy import Column, Date, Float, ForeignKey, Integer, String
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Maintenance(Base):
    """Maintenance cost event for a piece of equipment within a scenario."""

    __tablename__ = "maintenance"

    id = Column(Integer, primary_key=True, index=True)
    equipment_id = Column(Integer, ForeignKey("equipment.id"), nullable=False)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    maintenance_date = Column(Date, nullable=False)
    description = Column(String, nullable=True)
    cost = Column(Float, nullable=False)

    equipment = relationship("Equipment")
    scenario = relationship("Scenario", back_populates="maintenance_items")

    def __repr__(self) -> str:
        return (
            f"<Maintenance id={self.id} equipment_id={self.equipment_id} "
            f"scenario_id={self.scenario_id} date={self.maintenance_date} cost={self.cost}>"
        )
|
|
||||||
108
models/metadata.py
Normal file
108
models/metadata.py
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from .enums import ResourceType, CostBucket, StochasticVariable
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ResourceDescriptor:
    """Describes canonical metadata for a resource type."""

    # Canonical measurement unit, e.g. "L", "kWh", "m3".
    unit: str
    description: str


# Canonical unit/description for every ResourceType member.
RESOURCE_METADATA: dict[ResourceType, ResourceDescriptor] = {
    ResourceType.DIESEL: ResourceDescriptor(unit="L", description="Diesel fuel consumption"),
    ResourceType.ELECTRICITY: ResourceDescriptor(unit="kWh", description="Electrical power usage"),
    ResourceType.WATER: ResourceDescriptor(unit="m3", description="Process and dust suppression water"),
    ResourceType.EXPLOSIVES: ResourceDescriptor(unit="kg", description="Blasting agent consumption"),
    ResourceType.REAGENTS: ResourceDescriptor(unit="kg", description="Processing reagents"),
    ResourceType.LABOR: ResourceDescriptor(unit="hours", description="Direct labor hours"),
    ResourceType.EQUIPMENT_HOURS: ResourceDescriptor(unit="hours", description="Mobile equipment operating hours"),
    ResourceType.TAILINGS_CAPACITY: ResourceDescriptor(unit="m3", description="Tailings storage usage"),
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class CostBucketDescriptor:
    """Describes reporting label and guidance for a cost bucket."""

    # Short label for reports/UI.
    label: str
    description: str


# Reporting label/description for every CostBucket member.
COST_BUCKET_METADATA: dict[CostBucket, CostBucketDescriptor] = {
    CostBucket.CAPITAL_INITIAL: CostBucketDescriptor(
        label="Initial Capital",
        description="Pre-production capital required to construct the mine",
    ),
    CostBucket.CAPITAL_SUSTAINING: CostBucketDescriptor(
        label="Sustaining Capital",
        description="Ongoing capital investments to maintain operations",
    ),
    CostBucket.OPERATING_FIXED: CostBucketDescriptor(
        label="Fixed Operating",
        description="Fixed operating costs independent of production rate",
    ),
    CostBucket.OPERATING_VARIABLE: CostBucketDescriptor(
        label="Variable Operating",
        description="Costs that scale with throughput or production",
    ),
    CostBucket.MAINTENANCE: CostBucketDescriptor(
        label="Maintenance",
        description="Maintenance and repair expenditures",
    ),
    CostBucket.RECLAMATION: CostBucketDescriptor(
        label="Reclamation",
        description="Mine closure and reclamation liabilities",
    ),
    CostBucket.ROYALTIES: CostBucketDescriptor(
        label="Royalties",
        description="Royalty and streaming obligations",
    ),
    CostBucket.GENERAL_ADMIN: CostBucketDescriptor(
        label="G&A",
        description="Corporate and site general and administrative costs",
    ),
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class StochasticVariableDescriptor:
    """Immutable metadata describing how a stochastic variable is
    typically modelled in simulations."""

    unit: str         # unit of measure for the variable (e.g. "g/t", "%")
    description: str  # short explanation of the variability source
|
# Unit and modelling guidance for each stochastic simulation variable,
# keyed by StochasticVariable member.  Built from a flat tuple table.
STOCHASTIC_VARIABLE_METADATA: dict[StochasticVariable, StochasticVariableDescriptor] = {
    variable: StochasticVariableDescriptor(unit=unit, description=description)
    for variable, unit, description in (
        (StochasticVariable.ORE_GRADE, "g/t",
         "Head grade variability across the ore body"),
        (StochasticVariable.RECOVERY_RATE, "%",
         "Metallurgical recovery uncertainty"),
        (StochasticVariable.METAL_PRICE, "$/unit",
         "Commodity price fluctuations"),
        (StochasticVariable.OPERATING_COST, "$/t",
         "Operating cost per tonne volatility"),
        (StochasticVariable.CAPITAL_COST, "$",
         "Capital cost overrun/underrun potential"),
        (StochasticVariable.DISCOUNT_RATE, "%",
         "Discount rate sensitivity"),
        (StochasticVariable.THROUGHPUT, "t/d",
         "Plant throughput variability"),
    )
}
125
models/navigation.py
Normal file
125
models/navigation.py
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
Boolean,
|
||||||
|
CheckConstraint,
|
||||||
|
DateTime,
|
||||||
|
ForeignKey,
|
||||||
|
Index,
|
||||||
|
Integer,
|
||||||
|
String,
|
||||||
|
UniqueConstraint,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
from sqlalchemy.ext.mutable import MutableList
|
||||||
|
from sqlalchemy import JSON
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class NavigationGroup(Base):
    """Top-level navigation group containing an ordered set of links.

    Groups are identified by a globally unique ``slug`` and rendered in
    ascending ``sort_order``.
    """

    __tablename__ = "navigation_groups"
    __table_args__ = (
        # slug is the stable external identifier for a group
        UniqueConstraint("slug", name="uq_navigation_groups_slug"),
        # supports ordered listing of groups
        Index("ix_navigation_groups_sort_order", "sort_order"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    slug: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(128), nullable=False)
    # Lower values sort first; default of 100 leaves room to insert
    # groups ahead of the defaults without renumbering.
    sort_order: Mapped[int] = mapped_column(
        Integer, nullable=False, default=100)
    icon: Mapped[Optional[str]] = mapped_column(String(64))
    tooltip: Mapped[Optional[str]] = mapped_column(String(255))
    is_enabled: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    # Timestamps are set by the database (func.now()), not the client.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    # Child links are deleted along with their group (delete-orphan)
    # and are always returned in sort order.
    links: Mapped[List["NavigationLink"]] = relationship(
        "NavigationLink",
        back_populates="group",
        cascade="all, delete-orphan",
        order_by="NavigationLink.sort_order",
    )

    def __repr__(self) -> str:  # pragma: no cover
        return f"NavigationGroup(id={self.id!r}, slug={self.slug!r})"
|
class NavigationLink(Base):
    """A single navigation entry within a :class:`NavigationGroup`.

    Links may nest one level (or more) via ``parent_link_id`` and must
    provide either a named route or an explicit href (enforced by a
    check constraint).  Visibility can be restricted by role.
    """

    __tablename__ = "navigation_links"
    __table_args__ = (
        # slug is unique only within its group, not globally
        UniqueConstraint("group_id", "slug",
                         name="uq_navigation_links_group_slug"),
        Index("ix_navigation_links_group_sort", "group_id", "sort_order"),
        Index("ix_navigation_links_parent_sort",
              "parent_link_id", "sort_order"),
        # every link must resolve to a destination one way or the other
        CheckConstraint(
            "(route_name IS NOT NULL) OR (href_override IS NOT NULL)",
            name="ck_navigation_links_route_or_href",
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    group_id: Mapped[int] = mapped_column(
        ForeignKey("navigation_groups.id", ondelete="CASCADE"), nullable=False
    )
    # Self-referential FK: deleting a parent cascades to its children.
    parent_link_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("navigation_links.id", ondelete="CASCADE")
    )
    slug: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(128), nullable=False)
    # route_name is resolved by the app's router; href_override wins as a
    # literal URL when present.  At least one is required (see constraint).
    route_name: Mapped[Optional[str]] = mapped_column(String(128))
    href_override: Mapped[Optional[str]] = mapped_column(String(512))
    # Path prefix used for "active link" matching — TODO confirm against caller.
    match_prefix: Mapped[Optional[str]] = mapped_column(String(512))
    sort_order: Mapped[int] = mapped_column(
        Integer, nullable=False, default=100)
    icon: Mapped[Optional[str]] = mapped_column(String(64))
    tooltip: Mapped[Optional[str]] = mapped_column(String(255))
    # JSON list of role names; MutableList makes in-place mutations
    # (append/remove) visible to the session's change tracking.
    required_roles: Mapped[list[str]] = mapped_column(
        MutableList.as_mutable(JSON), nullable=False, default=list
    )
    is_enabled: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    is_external: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    group: Mapped[NavigationGroup] = relationship(
        NavigationGroup,
        back_populates="links",
    )
    # remote_side marks the "one" side of the self-referential relation.
    parent: Mapped[Optional["NavigationLink"]] = relationship(
        "NavigationLink",
        remote_side="NavigationLink.id",
        back_populates="children",
    )
    children: Mapped[List["NavigationLink"]] = relationship(
        "NavigationLink",
        back_populates="parent",
        cascade="all, delete-orphan",
        order_by="NavigationLink.sort_order",
    )

    def is_visible_for_roles(self, roles: list[str]) -> bool:
        """Return True when the caller holds at least one required role.

        A link with no ``required_roles`` is visible to everyone.
        """
        if not self.required_roles:
            return True
        role_set = set(roles)
        return any(role in role_set for role in self.required_roles)

    def __repr__(self) -> str:  # pragma: no cover
        return f"NavigationLink(id={self.id!r}, slug={self.slug!r})"
@@ -1,63 +0,0 @@
|
|||||||
from sqlalchemy import event, text
|
|
||||||
from sqlalchemy import Column, Integer, Float, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Opex(Base):
    """A single operating-expenditure line item belonging to a scenario."""

    __tablename__ = "opex"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    currency_id = Column(Integer, ForeignKey("currency.id"), nullable=False)

    scenario = relationship("Scenario", back_populates="opex_items")
    currency = relationship("Currency", back_populates="opex_items")

    def __repr__(self):
        return (
            f"<Opex id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} currency_id={self.currency_id}>"
        )

    @property
    def currency_code(self) -> str:
        # NOTE(review): returns None when no currency is loaded despite the
        # `-> str` annotation — callers should treat this as Optional[str].
        return self.currency.code if self.currency else None

    @currency_code.setter
    def currency_code(self, value: str) -> None:
        # The code is not resolved to a currency_id here; it is stashed on
        # the instance and resolved by the before_insert/before_update
        # listener (_resolve_currency_opex).  Defaults to "USD" when falsy.
        setattr(
            self, "_currency_code_pending", (value or "USD").strip().upper()
        )
|
|
||||||
def _resolve_currency_opex(mapper, connection, target):
    """ORM event hook: resolve a pending currency code to a currency_id.

    Runs before INSERT/UPDATE of an :class:`Opex` row.  If the row already
    carries a ``currency_id`` it is left untouched; otherwise the code set
    via the ``currency_code`` setter (default "USD") is looked up in the
    ``currency`` table and inserted on demand.
    """
    if getattr(target, "currency_id", None):
        return
    code = getattr(target, "_currency_code_pending", None) or "USD"
    row = connection.execute(
        text("SELECT id FROM currency WHERE code = :code"), {"code": code}
    ).fetchone()
    if row:
        cid = row[0]
    else:
        res = connection.execute(
            text(
                "INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"
            ),
            {"code": code, "name": code, "symbol": None, "active": True},
        )
        try:
            # lastrowid is dialect-specific (works on SQLite/MySQL);
            # fall back to a SELECT where it is unavailable.
            cid = res.lastrowid
        except Exception:
            cid = connection.execute(
                text("SELECT id FROM currency WHERE code = :code"),
                {"code": code},
            ).scalar()
    target.currency_id = cid


# Register the resolver for both insert and update flushes of Opex rows.
event.listen(Opex, "before_insert", _resolve_currency_opex)
event.listen(Opex, "before_update", _resolve_currency_opex)
|
||||||
123
models/opex_snapshot.py
Normal file
123
models/opex_snapshot.py
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import JSON, Boolean, DateTime, ForeignKey, Integer, Numeric, String
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .project import Project
|
||||||
|
from .scenario import Scenario
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectOpexSnapshot(Base):
    """Snapshot of recurring opex metrics at the project level.

    Persists the headline results of an opex calculation run so they can
    be displayed without recomputation; the full result payload is kept
    as JSON in ``payload``.
    """

    __tablename__ = "project_opex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # SET NULL keeps the snapshot when the creating user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Identifier of the code path / tool that produced the snapshot.
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # ISO 4217 code, e.g. "USD".
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    overall_annual: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    escalated_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    annual_average: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    evaluation_horizon_years: Mapped[int | None] = mapped_column(
        Integer, nullable=True)
    escalation_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    apply_escalation: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Full calculation result as JSON for drill-down views.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="opex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectOpexSnapshot(id={id!r}, project_id={project_id!r}, overall_annual={overall_annual!r})".format(
                id=self.id,
                project_id=self.project_id,
                overall_annual=self.overall_annual,
            )
        )
||||||
|
|
||||||
|
|
||||||
|
class ScenarioOpexSnapshot(Base):
    """Snapshot of opex metrics for an individual scenario.

    Mirrors :class:`ProjectOpexSnapshot` but is keyed to a scenario
    rather than the whole project.
    """

    __tablename__ = "scenario_opex_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # SET NULL keeps the snapshot when the creating user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # ISO 4217 code, e.g. "USD".
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    overall_annual: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    escalated_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    annual_average: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    evaluation_horizon_years: Mapped[int | None] = mapped_column(
        Integer, nullable=True)
    escalation_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    apply_escalation: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True)
    component_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Full calculation result as JSON for drill-down views.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="opex_snapshots"
    )
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ScenarioOpexSnapshot(id={id!r}, scenario_id={scenario_id!r}, overall_annual={overall_annual!r})".format(
                id=self.id,
                scenario_id=self.scenario_id,
                overall_annual=self.overall_annual,
            )
        )
||||||
@@ -1,29 +0,0 @@
|
|||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from sqlalchemy import ForeignKey, JSON
|
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Parameter(Base):
    """A named scalar input for a scenario, optionally tied to a
    probability distribution for stochastic simulation."""

    __tablename__ = "parameter"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenario.id"), nullable=False
    )
    name: Mapped[str] = mapped_column(nullable=False)
    # Deterministic (base-case) value; used directly when no
    # distribution is attached.
    value: Mapped[float] = mapped_column(nullable=False)
    distribution_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("distribution.id"), nullable=True
    )
    # Inline distribution spec as an alternative to the FK above —
    # presumably type name plus its parameters; TODO confirm which
    # takes precedence when both are set.
    distribution_type: Mapped[Optional[str]] = mapped_column(nullable=True)
    distribution_parameters: Mapped[Optional[Dict[str, Any]]] = mapped_column(
        JSON, nullable=True
    )

    scenario = relationship("Scenario", back_populates="parameters")
    distribution = relationship("Distribution")

    def __repr__(self):
        return f"<Parameter id={self.id} name={self.name} value={self.value}>"
|
||||||
24
models/performance_metric.py
Normal file
24
models/performance_metric.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from sqlalchemy import Column, DateTime, Float, Integer, String
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class PerformanceMetric(Base):
    """A single recorded application performance measurement.

    Generic name/value pairs with optional HTTP request context for
    endpoint-level metrics.
    """

    __tablename__ = "performance_metrics"

    id = Column(Integer, primary_key=True, index=True)
    # NOTE(review): datetime.utcnow is naive and deprecated since Python
    # 3.12 — consider datetime.now(timezone.utc) in a follow-up.
    timestamp = Column(DateTime, default=datetime.utcnow, index=True)
    metric_name = Column(String, index=True)
    value = Column(Float)
    labels = Column(String)  # JSON string of labels
    # HTTP context; null for non-request metrics.
    endpoint = Column(String, index=True, nullable=True)
    method = Column(String, nullable=True)
    status_code = Column(Integer, nullable=True)
    duration_seconds = Column(Float, nullable=True)

    def __repr__(self) -> str:
        return f"<PerformanceMetric(id={self.id}, name={self.metric_name}, value={self.value})>"
||||||
176
models/pricing_settings.py
Normal file
176
models/pricing_settings.py
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
"""Database models for persisted pricing configuration settings."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
JSON,
|
||||||
|
DateTime,
|
||||||
|
ForeignKey,
|
||||||
|
Integer,
|
||||||
|
Numeric,
|
||||||
|
String,
|
||||||
|
Text,
|
||||||
|
UniqueConstraint,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
from services.currency import normalise_currency
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .project import Project
|
||||||
|
|
||||||
|
|
||||||
|
class PricingSettings(Base):
    """Persisted pricing defaults applied to scenario evaluations.

    Holds the base payable percentage and moisture penalty terms; per-metal
    and per-impurity overrides hang off :class:`PricingMetalSettings` and
    :class:`PricingImpuritySettings`.
    """

    __tablename__ = "pricing_settings"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String(128), nullable=False, unique=True)
    # URL-safe identifier; normalised to lowercase by the validator below.
    slug: Mapped[str] = mapped_column(String(64), nullable=False, unique=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    # ISO 4217 code; validated/normalised via services.currency.
    default_currency: Mapped[str | None] = mapped_column(
        String(3), nullable=True)
    # Percentage of contained metal that is payable (100 = fully payable).
    default_payable_pct: Mapped[float] = mapped_column(
        Numeric(5, 2), nullable=False, default=100.0
    )
    # Moisture above this threshold incurs the per-point penalty below.
    moisture_threshold_pct: Mapped[float] = mapped_column(
        Numeric(5, 2), nullable=False, default=8.0
    )
    moisture_penalty_per_pct: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0
    )
    # Stored in column "metadata" — attribute renamed because `metadata`
    # is reserved on SQLAlchemy declarative classes.
    metadata_payload: Mapped[dict | None] = mapped_column(
        "metadata", JSON, nullable=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    metal_overrides: Mapped[list["PricingMetalSettings"]] = relationship(
        "PricingMetalSettings",
        back_populates="pricing_settings",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    impurity_overrides: Mapped[list["PricingImpuritySettings"]] = relationship(
        "PricingImpuritySettings",
        back_populates="pricing_settings",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    projects: Mapped[list["Project"]] = relationship(
        "Project",
        back_populates="pricing_settings",
        cascade="all",
    )

    @validates("slug")
    def _normalise_slug(self, key: str, value: str) -> str:
        # Keep slugs canonical: trimmed and lowercase.
        return value.strip().lower()

    @validates("default_currency")
    def _validate_currency(self, key: str, value: str | None) -> str | None:
        return normalise_currency(value)

    def __repr__(self) -> str:  # pragma: no cover
        return f"PricingSettings(id={self.id!r}, slug={self.slug!r})"
||||||
|
|
||||||
|
|
||||||
|
class PricingMetalSettings(Base):
    """Contract-specific overrides for a particular metal.

    Any null field falls back to the parent :class:`PricingSettings`
    default — presumably resolved at evaluation time; TODO confirm.
    """

    __tablename__ = "pricing_metal_settings"
    __table_args__ = (
        # at most one override row per metal per settings record
        UniqueConstraint(
            "pricing_settings_id", "metal_code", name="uq_pricing_metal_settings_code"
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    pricing_settings_id: Mapped[int] = mapped_column(
        ForeignKey("pricing_settings.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # Normalised to lowercase by the validator below.
    metal_code: Mapped[str] = mapped_column(String(32), nullable=False)
    payable_pct: Mapped[float | None] = mapped_column(
        Numeric(5, 2), nullable=True)
    moisture_threshold_pct: Mapped[float | None] = mapped_column(
        Numeric(5, 2), nullable=True)
    moisture_penalty_per_pct: Mapped[float | None] = mapped_column(
        Numeric(14, 4), nullable=True
    )
    # Free-form extra override data as JSON.
    data: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    pricing_settings: Mapped["PricingSettings"] = relationship(
        "PricingSettings", back_populates="metal_overrides"
    )

    @validates("metal_code")
    def _normalise_metal_code(self, key: str, value: str) -> str:
        return value.strip().lower()

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "PricingMetalSettings("  # noqa: ISC001
            f"id={self.id!r}, pricing_settings_id={self.pricing_settings_id!r}, "
            f"metal_code={self.metal_code!r})"
        )
||||||
|
|
||||||
|
|
||||||
|
class PricingImpuritySettings(Base):
    """Impurity penalty thresholds associated with pricing settings.

    Content above ``threshold_ppm`` is penalised at ``penalty_per_ppm``.
    """

    __tablename__ = "pricing_impurity_settings"
    __table_args__ = (
        # at most one row per impurity per settings record
        UniqueConstraint(
            "pricing_settings_id",
            "impurity_code",
            name="uq_pricing_impurity_settings_code",
        ),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    pricing_settings_id: Mapped[int] = mapped_column(
        ForeignKey("pricing_settings.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # Normalised to UPPERCASE by the validator below (note: metal codes
    # in PricingMetalSettings are lowercased instead).
    impurity_code: Mapped[str] = mapped_column(String(32), nullable=False)
    threshold_ppm: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0)
    penalty_per_ppm: Mapped[float] = mapped_column(
        Numeric(14, 4), nullable=False, default=0.0)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    pricing_settings: Mapped["PricingSettings"] = relationship(
        "PricingSettings", back_populates="impurity_overrides"
    )

    @validates("impurity_code")
    def _normalise_impurity_code(self, key: str, value: str) -> str:
        return value.strip().upper()

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "PricingImpuritySettings("  # noqa: ISC001
            f"id={self.id!r}, pricing_settings_id={self.pricing_settings_id!r}, "
            f"impurity_code={self.impurity_code!r})"
        )
||||||
@@ -1,24 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, Float, String, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class ProductionOutput(Base):
    """A production output quantity for a scenario, with an optional
    free-text unit (name and symbol stored denormalised)."""

    __tablename__ = "production_output"

    id = Column(Integer, primary_key=True, index=True)
    scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
    amount = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    # Unit is stored inline rather than via FK; symbol is preferred in
    # display (see __repr__).
    unit_name = Column(String(64), nullable=True)
    unit_symbol = Column(String(16), nullable=True)

    scenario = relationship(
        "Scenario", back_populates="production_output_items"
    )

    def __repr__(self):
        return (
            f"<ProductionOutput id={self.id} scenario_id={self.scenario_id} "
            f"amount={self.amount} unit={self.unit_symbol or self.unit_name}>"
        )
|
||||||
133
models/profitability_snapshot.py
Normal file
133
models/profitability_snapshot.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from sqlalchemy import JSON, DateTime, ForeignKey, Integer, Numeric, String
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .project import Project
|
||||||
|
from .scenario import Scenario
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectProfitability(Base):
    """Snapshot of aggregated profitability metrics at the project level.

    Persists headline financial results (NPV, IRR, payback, totals) of a
    profitability calculation; the full result is kept as JSON payload.
    """

    __tablename__ = "project_profitability_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(
        ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # SET NULL keeps the snapshot when the creating user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # ISO 4217 code, e.g. "USD".
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    npv: Mapped[float | None] = mapped_column(Numeric(18, 2), nullable=True)
    irr_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    payback_period_years: Mapped[float | None] = mapped_column(
        Numeric(12, 4), nullable=True
    )
    margin_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    revenue_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    opex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    sustaining_capex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    net_cash_flow_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    # Full calculation result as JSON for drill-down views.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    project: Mapped[Project] = relationship(
        "Project", back_populates="profitability_snapshots")
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ProjectProfitability(id={id!r}, project_id={project_id!r}, npv={npv!r})".format(
                id=self.id, project_id=self.project_id, npv=self.npv
            )
        )
||||||
|
|
||||||
|
|
||||||
|
class ScenarioProfitability(Base):
    """Snapshot of profitability metrics for an individual scenario.

    Mirrors :class:`ProjectProfitability` but is keyed to a scenario.
    """

    __tablename__ = "scenario_profitability_snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    scenario_id: Mapped[int] = mapped_column(
        ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # SET NULL keeps the snapshot when the creating user is deleted.
    created_by_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True
    )
    calculation_source: Mapped[str | None] = mapped_column(
        String(64), nullable=True)
    calculated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # ISO 4217 code, e.g. "USD".
    currency_code: Mapped[str | None] = mapped_column(String(3), nullable=True)
    npv: Mapped[float | None] = mapped_column(Numeric(18, 2), nullable=True)
    irr_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    payback_period_years: Mapped[float | None] = mapped_column(
        Numeric(12, 4), nullable=True
    )
    margin_pct: Mapped[float | None] = mapped_column(
        Numeric(12, 6), nullable=True)
    revenue_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    opex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    sustaining_capex_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    capex: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True)
    net_cash_flow_total: Mapped[float | None] = mapped_column(
        Numeric(18, 2), nullable=True
    )
    # Full calculation result as JSON for drill-down views.
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )

    scenario: Mapped[Scenario] = relationship(
        "Scenario", back_populates="profitability_snapshots")
    created_by: Mapped[User | None] = relationship("User")

    def __repr__(self) -> str:  # pragma: no cover
        return (
            "ScenarioProfitability(id={id!r}, scenario_id={scenario_id!r}, npv={npv!r})".format(
                id=self.id, scenario_id=self.scenario_id, npv=self.npv
            )
        )
||||||
104
models/project.py
Normal file
104
models/project.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING, List
|
||||||
|
|
||||||
|
from .enums import MiningOperationType, sql_enum
|
||||||
|
from .profitability_snapshot import ProjectProfitability
|
||||||
|
from .capex_snapshot import ProjectCapexSnapshot
|
||||||
|
from .opex_snapshot import ProjectOpexSnapshot
|
||||||
|
|
||||||
|
from sqlalchemy import DateTime, ForeignKey, Integer, String, Text
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .scenario import Scenario
|
||||||
|
from .pricing_settings import PricingSettings
|
||||||
|
|
||||||
|
|
||||||
|
class Project(Base):
|
||||||
|
"""Top-level mining project grouping multiple scenarios."""
|
||||||
|
|
||||||
|
__tablename__ = "projects"
|
||||||
|
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
|
||||||
|
name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
|
||||||
|
location: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||||
|
operation_type: Mapped[MiningOperationType] = mapped_column(
|
||||||
|
sql_enum(MiningOperationType, name="miningoperationtype"),
|
||||||
|
nullable=False,
|
||||||
|
default=MiningOperationType.OTHER,
|
||||||
|
)
|
||||||
|
description: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||||
|
pricing_settings_id: Mapped[int | None] = mapped_column(
|
||||||
|
ForeignKey("pricing_settings.id", ondelete="SET NULL"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
|
||||||
|
)
|
||||||
|
|
||||||
|
scenarios: Mapped[List["Scenario"]] = relationship(
|
||||||
|
"Scenario",
|
||||||
|
back_populates="project",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
pricing_settings: Mapped["PricingSettings | None"] = relationship(
|
||||||
|
"PricingSettings",
|
||||||
|
back_populates="projects",
|
||||||
|
)
|
||||||
|
profitability_snapshots: Mapped[List["ProjectProfitability"]] = relationship(
|
||||||
|
"ProjectProfitability",
|
||||||
|
back_populates="project",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ProjectProfitability.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
capex_snapshots: Mapped[List["ProjectCapexSnapshot"]] = relationship(
|
||||||
|
"ProjectCapexSnapshot",
|
||||||
|
back_populates="project",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ProjectCapexSnapshot.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
opex_snapshots: Mapped[List["ProjectOpexSnapshot"]] = relationship(
|
||||||
|
"ProjectOpexSnapshot",
|
||||||
|
back_populates="project",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ProjectOpexSnapshot.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_profitability(self) -> "ProjectProfitability | None":
|
||||||
|
"""Return the most recent profitability snapshot, if any."""
|
||||||
|
|
||||||
|
if not self.profitability_snapshots:
|
||||||
|
return None
|
||||||
|
return self.profitability_snapshots[0]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_capex(self) -> "ProjectCapexSnapshot | None":
|
||||||
|
"""Return the most recent capex snapshot, if any."""
|
||||||
|
|
||||||
|
if not self.capex_snapshots:
|
||||||
|
return None
|
||||||
|
return self.capex_snapshots[0]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_opex(self) -> "ProjectOpexSnapshot | None":
|
||||||
|
"""Return the most recent opex snapshot, if any."""
|
||||||
|
|
||||||
|
if not self.opex_snapshots:
|
||||||
|
return None
|
||||||
|
return self.opex_snapshots[0]
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover - helpful for debugging
|
||||||
|
return f"Project(id={self.id!r}, name={self.name!r})"
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, String
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class Role(Base):
|
|
||||||
__tablename__ = "roles"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True, index=True)
|
|
||||||
name = Column(String, unique=True, index=True)
|
|
||||||
|
|
||||||
users = relationship("User", back_populates="role")
|
|
||||||
@@ -1,36 +1,133 @@
|
|||||||
from sqlalchemy import Column, Integer, String, DateTime, func
|
from __future__ import annotations
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from models.simulation_result import SimulationResult
|
from datetime import date, datetime
|
||||||
from models.capex import Capex
|
from typing import TYPE_CHECKING, List
|
||||||
from models.opex import Opex
|
|
||||||
from models.consumption import Consumption
|
from sqlalchemy import (
|
||||||
from models.production_output import ProductionOutput
|
Date,
|
||||||
from models.equipment import Equipment
|
DateTime,
|
||||||
from models.maintenance import Maintenance
|
ForeignKey,
|
||||||
|
Integer,
|
||||||
|
Numeric,
|
||||||
|
String,
|
||||||
|
Text,
|
||||||
|
UniqueConstraint,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
from config.database import Base
|
from config.database import Base
|
||||||
|
from services.currency import normalise_currency
|
||||||
|
from .enums import ResourceType, ScenarioStatus, sql_enum
|
||||||
|
from .profitability_snapshot import ScenarioProfitability
|
||||||
|
from .capex_snapshot import ScenarioCapexSnapshot
|
||||||
|
from .opex_snapshot import ScenarioOpexSnapshot
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .financial_input import FinancialInput
|
||||||
|
from .project import Project
|
||||||
|
from .simulation_parameter import SimulationParameter
|
||||||
|
|
||||||
|
|
||||||
class Scenario(Base):
|
class Scenario(Base):
|
||||||
__tablename__ = "scenario"
|
"""A specific configuration of assumptions for a project."""
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True, index=True)
|
__tablename__ = "scenarios"
|
||||||
name = Column(String, unique=True, nullable=False)
|
__table_args__ = (
|
||||||
description = Column(String)
|
UniqueConstraint("project_id", "name",
|
||||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
name="uq_scenarios_project_name"),
|
||||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
|
||||||
parameters = relationship("Parameter", back_populates="scenario")
|
|
||||||
simulation_results = relationship(
|
|
||||||
SimulationResult, back_populates="scenario"
|
|
||||||
)
|
)
|
||||||
capex_items = relationship(Capex, back_populates="scenario")
|
|
||||||
opex_items = relationship(Opex, back_populates="scenario")
|
|
||||||
consumption_items = relationship(Consumption, back_populates="scenario")
|
|
||||||
production_output_items = relationship(
|
|
||||||
ProductionOutput, back_populates="scenario"
|
|
||||||
)
|
|
||||||
equipment_items = relationship(Equipment, back_populates="scenario")
|
|
||||||
maintenance_items = relationship(Maintenance, back_populates="scenario")
|
|
||||||
|
|
||||||
# relationships can be defined later
|
id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
|
||||||
def __repr__(self):
|
project_id: Mapped[int] = mapped_column(
|
||||||
return f"<Scenario id={self.id} name={self.name}>"
|
ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
|
||||||
|
)
|
||||||
|
name: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||||
|
description: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||||
|
status: Mapped[ScenarioStatus] = mapped_column(
|
||||||
|
sql_enum(ScenarioStatus, name="scenariostatus"),
|
||||||
|
nullable=False,
|
||||||
|
default=ScenarioStatus.DRAFT,
|
||||||
|
)
|
||||||
|
start_date: Mapped[date | None] = mapped_column(Date, nullable=True)
|
||||||
|
end_date: Mapped[date | None] = mapped_column(Date, nullable=True)
|
||||||
|
discount_rate: Mapped[float | None] = mapped_column(
|
||||||
|
Numeric(5, 2), nullable=True)
|
||||||
|
currency: Mapped[str | None] = mapped_column(String(3), nullable=True)
|
||||||
|
primary_resource: Mapped[ResourceType | None] = mapped_column(
|
||||||
|
sql_enum(ResourceType, name="resourcetype"), nullable=True
|
||||||
|
)
|
||||||
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
|
||||||
|
)
|
||||||
|
|
||||||
|
project: Mapped["Project"] = relationship(
|
||||||
|
"Project", back_populates="scenarios")
|
||||||
|
financial_inputs: Mapped[List["FinancialInput"]] = relationship(
|
||||||
|
"FinancialInput",
|
||||||
|
back_populates="scenario",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
simulation_parameters: Mapped[List["SimulationParameter"]] = relationship(
|
||||||
|
"SimulationParameter",
|
||||||
|
back_populates="scenario",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
profitability_snapshots: Mapped[List["ScenarioProfitability"]] = relationship(
|
||||||
|
"ScenarioProfitability",
|
||||||
|
back_populates="scenario",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ScenarioProfitability.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
capex_snapshots: Mapped[List["ScenarioCapexSnapshot"]] = relationship(
|
||||||
|
"ScenarioCapexSnapshot",
|
||||||
|
back_populates="scenario",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ScenarioCapexSnapshot.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
opex_snapshots: Mapped[List["ScenarioOpexSnapshot"]] = relationship(
|
||||||
|
"ScenarioOpexSnapshot",
|
||||||
|
back_populates="scenario",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
order_by=lambda: ScenarioOpexSnapshot.calculated_at.desc(),
|
||||||
|
passive_deletes=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
@validates("currency")
|
||||||
|
def _normalise_currency(self, key: str, value: str | None) -> str | None:
|
||||||
|
# Normalise to uppercase ISO-4217; raises when the code is malformed.
|
||||||
|
return normalise_currency(value)
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
return f"Scenario(id={self.id!r}, name={self.name!r}, project_id={self.project_id!r})"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_profitability(self) -> "ScenarioProfitability | None":
|
||||||
|
"""Return the most recent profitability snapshot for this scenario."""
|
||||||
|
|
||||||
|
if not self.profitability_snapshots:
|
||||||
|
return None
|
||||||
|
return self.profitability_snapshots[0]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_capex(self) -> "ScenarioCapexSnapshot | None":
|
||||||
|
"""Return the most recent capex snapshot for this scenario."""
|
||||||
|
|
||||||
|
if not self.capex_snapshots:
|
||||||
|
return None
|
||||||
|
return self.capex_snapshots[0]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_opex(self) -> "ScenarioOpexSnapshot | None":
|
||||||
|
"""Return the most recent opex snapshot for this scenario."""
|
||||||
|
|
||||||
|
if not self.opex_snapshots:
|
||||||
|
return None
|
||||||
|
return self.opex_snapshots[0]
|
||||||
|
|||||||
69
models/simulation_parameter.py
Normal file
69
models/simulation_parameter.py
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from .enums import DistributionType, ResourceType, StochasticVariable, sql_enum
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
JSON,
|
||||||
|
DateTime,
|
||||||
|
ForeignKey,
|
||||||
|
Integer,
|
||||||
|
Numeric,
|
||||||
|
String,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from config.database import Base
|
||||||
|
|
||||||
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
|
from .scenario import Scenario
|
||||||
|
|
||||||
|
|
||||||
|
class SimulationParameter(Base):
|
||||||
|
"""Probability distribution settings for scenario simulations."""
|
||||||
|
|
||||||
|
__tablename__ = "simulation_parameters"
|
||||||
|
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
|
scenario_id: Mapped[int] = mapped_column(
|
||||||
|
ForeignKey("scenarios.id", ondelete="CASCADE"), nullable=False, index=True
|
||||||
|
)
|
||||||
|
name: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||||
|
distribution: Mapped[DistributionType] = mapped_column(
|
||||||
|
sql_enum(DistributionType, name="distributiontype"), nullable=False
|
||||||
|
)
|
||||||
|
variable: Mapped[StochasticVariable | None] = mapped_column(
|
||||||
|
sql_enum(StochasticVariable, name="stochasticvariable"), nullable=True
|
||||||
|
)
|
||||||
|
resource_type: Mapped[ResourceType | None] = mapped_column(
|
||||||
|
sql_enum(ResourceType, name="resourcetype"), nullable=True
|
||||||
|
)
|
||||||
|
mean_value: Mapped[float | None] = mapped_column(
|
||||||
|
Numeric(18, 4), nullable=True)
|
||||||
|
standard_deviation: Mapped[float | None] = mapped_column(
|
||||||
|
Numeric(18, 4), nullable=True)
|
||||||
|
minimum_value: Mapped[float | None] = mapped_column(
|
||||||
|
Numeric(18, 4), nullable=True)
|
||||||
|
maximum_value: Mapped[float | None] = mapped_column(
|
||||||
|
Numeric(18, 4), nullable=True)
|
||||||
|
unit: Mapped[str | None] = mapped_column(String(32), nullable=True)
|
||||||
|
configuration: Mapped[dict | None] = mapped_column(JSON, nullable=True)
|
||||||
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
|
||||||
|
)
|
||||||
|
|
||||||
|
scenario: Mapped["Scenario"] = relationship(
|
||||||
|
"Scenario", back_populates="simulation_parameters"
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
return (
|
||||||
|
f"SimulationParameter(id={self.id!r}, scenario_id={self.scenario_id!r}, "
|
||||||
|
f"name={self.name!r})"
|
||||||
|
)
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, Float, ForeignKey
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class SimulationResult(Base):
|
|
||||||
__tablename__ = "simulation_result"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True, index=True)
|
|
||||||
scenario_id = Column(Integer, ForeignKey("scenario.id"), nullable=False)
|
|
||||||
iteration = Column(Integer, nullable=False)
|
|
||||||
result = Column(Float, nullable=False)
|
|
||||||
|
|
||||||
scenario = relationship("Scenario", back_populates="simulation_results")
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
from sqlalchemy import Column, Integer, String
|
|
||||||
|
|
||||||
from config.database import Base
|
|
||||||
|
|
||||||
|
|
||||||
class ThemeSetting(Base):
|
|
||||||
__tablename__ = "theme_settings"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True, index=True)
|
|
||||||
theme_name = Column(String, unique=True, index=True)
|
|
||||||
primary_color = Column(String)
|
|
||||||
secondary_color = Column(String)
|
|
||||||
accent_color = Column(String)
|
|
||||||
background_color = Column(String)
|
|
||||||
text_color = Column(String)
|
|
||||||
179
models/user.py
179
models/user.py
@@ -1,23 +1,176 @@
|
|||||||
from sqlalchemy import Column, Integer, String, ForeignKey
|
from __future__ import annotations
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
try: # pragma: no cover - defensive compatibility shim
|
||||||
|
import importlib.metadata as importlib_metadata
|
||||||
|
import argon2 # type: ignore
|
||||||
|
|
||||||
|
setattr(argon2, "__version__", importlib_metadata.version("argon2-cffi"))
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
from sqlalchemy import (
|
||||||
|
Boolean,
|
||||||
|
DateTime,
|
||||||
|
ForeignKey,
|
||||||
|
Integer,
|
||||||
|
String,
|
||||||
|
Text,
|
||||||
|
UniqueConstraint,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
from config.database import Base
|
from config.database import Base
|
||||||
from services.security import get_password_hash, verify_password
|
|
||||||
|
# Configure password hashing strategy. Argon2 provides strong resistance against
|
||||||
|
# GPU-based cracking attempts, aligning with the security plan.
|
||||||
|
password_context = CryptContext(schemes=["argon2"], deprecated="auto")
|
||||||
|
|
||||||
|
|
||||||
class User(Base):
|
class User(Base):
|
||||||
|
"""Authenticated platform user with optional elevated privileges."""
|
||||||
|
|
||||||
__tablename__ = "users"
|
__tablename__ = "users"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("email", name="uq_users_email"),
|
||||||
|
UniqueConstraint("username", name="uq_users_username"),
|
||||||
|
)
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True, index=True)
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
username = Column(String, unique=True, index=True)
|
email: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||||
email = Column(String, unique=True, index=True)
|
username: Mapped[str] = mapped_column(String(128), nullable=False)
|
||||||
hashed_password = Column(String)
|
password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||||
role_id = Column(Integer, ForeignKey("roles.id"))
|
is_active: Mapped[bool] = mapped_column(
|
||||||
|
Boolean, nullable=False, default=True)
|
||||||
|
is_superuser: Mapped[bool] = mapped_column(
|
||||||
|
Boolean, nullable=False, default=False)
|
||||||
|
last_login_at: Mapped[datetime | None] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=True
|
||||||
|
)
|
||||||
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
|
||||||
|
)
|
||||||
|
|
||||||
role = relationship("Role", back_populates="users")
|
role_assignments: Mapped[List["UserRole"]] = relationship(
|
||||||
|
"UserRole",
|
||||||
|
back_populates="user",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
foreign_keys="UserRole.user_id",
|
||||||
|
)
|
||||||
|
roles: Mapped[List["Role"]] = relationship(
|
||||||
|
"Role",
|
||||||
|
secondary="user_roles",
|
||||||
|
primaryjoin="User.id == UserRole.user_id",
|
||||||
|
secondaryjoin="Role.id == UserRole.role_id",
|
||||||
|
viewonly=True,
|
||||||
|
back_populates="users",
|
||||||
|
)
|
||||||
|
|
||||||
def set_password(self, password: str):
|
def set_password(self, raw_password: str) -> None:
|
||||||
self.hashed_password = get_password_hash(password)
|
"""Hash and store a password for the user."""
|
||||||
|
|
||||||
def check_password(self, password: str) -> bool:
|
self.password_hash = self.hash_password(raw_password)
|
||||||
return verify_password(password, str(self.hashed_password))
|
|
||||||
|
@staticmethod
|
||||||
|
def hash_password(raw_password: str) -> str:
|
||||||
|
"""Return the Argon2 hash for a clear-text password."""
|
||||||
|
|
||||||
|
return password_context.hash(raw_password)
|
||||||
|
|
||||||
|
def verify_password(self, candidate_password: str) -> bool:
|
||||||
|
"""Validate a password against the stored hash."""
|
||||||
|
|
||||||
|
if not self.password_hash:
|
||||||
|
return False
|
||||||
|
return password_context.verify(candidate_password, self.password_hash)
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover - helpful for debugging
|
||||||
|
return f"User(id={self.id!r}, email={self.email!r})"
|
||||||
|
|
||||||
|
|
||||||
|
class Role(Base):
|
||||||
|
"""Role encapsulating a set of permissions."""
|
||||||
|
|
||||||
|
__tablename__ = "roles"
|
||||||
|
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
|
name: Mapped[str] = mapped_column(String(64), nullable=False, unique=True)
|
||||||
|
display_name: Mapped[str] = mapped_column(String(128), nullable=False)
|
||||||
|
description: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||||
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
|
||||||
|
)
|
||||||
|
|
||||||
|
assignments: Mapped[List["UserRole"]] = relationship(
|
||||||
|
"UserRole",
|
||||||
|
back_populates="role",
|
||||||
|
cascade="all, delete-orphan",
|
||||||
|
foreign_keys="UserRole.role_id",
|
||||||
|
)
|
||||||
|
users: Mapped[List["User"]] = relationship(
|
||||||
|
"User",
|
||||||
|
secondary="user_roles",
|
||||||
|
primaryjoin="Role.id == UserRole.role_id",
|
||||||
|
secondaryjoin="User.id == UserRole.user_id",
|
||||||
|
viewonly=True,
|
||||||
|
back_populates="roles",
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover - helpful for debugging
|
||||||
|
return f"Role(id={self.id!r}, name={self.name!r})"
|
||||||
|
|
||||||
|
|
||||||
|
class UserRole(Base):
|
||||||
|
"""Association between users and roles with assignment metadata."""
|
||||||
|
|
||||||
|
__tablename__ = "user_roles"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("user_id", "role_id", name="uq_user_roles_user_role"),
|
||||||
|
)
|
||||||
|
|
||||||
|
user_id: Mapped[int] = mapped_column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("users.id", ondelete="CASCADE"),
|
||||||
|
primary_key=True,
|
||||||
|
)
|
||||||
|
role_id: Mapped[int] = mapped_column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("roles.id", ondelete="CASCADE"),
|
||||||
|
primary_key=True,
|
||||||
|
)
|
||||||
|
granted_at: Mapped[datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||||
|
)
|
||||||
|
granted_by: Mapped[Optional[int]] = mapped_column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("users.id", ondelete="SET NULL"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
user: Mapped["User"] = relationship(
|
||||||
|
"User",
|
||||||
|
foreign_keys=[user_id],
|
||||||
|
back_populates="role_assignments",
|
||||||
|
)
|
||||||
|
role: Mapped["Role"] = relationship(
|
||||||
|
"Role",
|
||||||
|
foreign_keys=[role_id],
|
||||||
|
back_populates="assignments",
|
||||||
|
)
|
||||||
|
granted_by_user: Mapped[Optional["User"]] = relationship(
|
||||||
|
"User",
|
||||||
|
foreign_keys=[granted_by],
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover - debugging helper
|
||||||
|
return f"UserRole(user_id={self.user_id!r}, role_id={self.role_id!r})"
|
||||||
|
|||||||
117
monitoring/__init__.py
Normal file
117
monitoring/__init__.py
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Query, Response
|
||||||
|
from prometheus_client import CONTENT_TYPE_LATEST, generate_latest
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from config.database import get_db
|
||||||
|
from services.metrics import MetricsService
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/metrics", tags=["monitoring"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", summary="Prometheus metrics endpoint", include_in_schema=False)
|
||||||
|
async def metrics_endpoint() -> Response:
|
||||||
|
payload = generate_latest()
|
||||||
|
return Response(content=payload, media_type=CONTENT_TYPE_LATEST)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/performance", summary="Get performance metrics")
|
||||||
|
async def get_performance_metrics(
|
||||||
|
metric_name: Optional[str] = Query(
|
||||||
|
None, description="Filter by metric name"),
|
||||||
|
hours: int = Query(24, description="Hours back to look"),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
|
) -> dict:
|
||||||
|
"""Get aggregated performance metrics."""
|
||||||
|
service = MetricsService(db)
|
||||||
|
start_time = datetime.utcnow() - timedelta(hours=hours)
|
||||||
|
|
||||||
|
if metric_name:
|
||||||
|
metrics = service.get_metrics(
|
||||||
|
metric_name=metric_name, start_time=start_time)
|
||||||
|
aggregated = service.get_aggregated_metrics(
|
||||||
|
metric_name, start_time=start_time)
|
||||||
|
return {
|
||||||
|
"metric_name": metric_name,
|
||||||
|
"period_hours": hours,
|
||||||
|
"aggregated": aggregated,
|
||||||
|
"recent_samples": [
|
||||||
|
{
|
||||||
|
"timestamp": m.timestamp.isoformat(),
|
||||||
|
"value": m.value,
|
||||||
|
"labels": m.labels,
|
||||||
|
"endpoint": m.endpoint,
|
||||||
|
"method": m.method,
|
||||||
|
"status_code": m.status_code,
|
||||||
|
"duration_seconds": m.duration_seconds,
|
||||||
|
}
|
||||||
|
for m in metrics[:50] # Last 50 samples
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Return summary for all metrics
|
||||||
|
all_metrics = service.get_metrics(start_time=start_time, limit=1000)
|
||||||
|
metric_types = {}
|
||||||
|
for m in all_metrics:
|
||||||
|
if m.metric_name not in metric_types:
|
||||||
|
metric_types[m.metric_name] = []
|
||||||
|
metric_types[m.metric_name].append(m.value)
|
||||||
|
|
||||||
|
summary = {}
|
||||||
|
for name, values in metric_types.items():
|
||||||
|
summary[name] = {
|
||||||
|
"count": len(values),
|
||||||
|
"avg": sum(values) / len(values) if values else 0,
|
||||||
|
"min": min(values) if values else 0,
|
||||||
|
"max": max(values) if values else 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"period_hours": hours,
|
||||||
|
"summary": summary,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/health", summary="Detailed health check with metrics")
|
||||||
|
async def detailed_health(db: Session = Depends(get_db)) -> dict:
|
||||||
|
"""Get detailed health status with recent metrics."""
|
||||||
|
service = MetricsService(db)
|
||||||
|
last_hour = datetime.utcnow() - timedelta(hours=1)
|
||||||
|
|
||||||
|
# Get request metrics from last hour
|
||||||
|
request_metrics = service.get_metrics(
|
||||||
|
metric_name="http_request", start_time=last_hour
|
||||||
|
)
|
||||||
|
|
||||||
|
if request_metrics:
|
||||||
|
durations = []
|
||||||
|
error_count = 0
|
||||||
|
for m in request_metrics:
|
||||||
|
if m.duration_seconds is not None:
|
||||||
|
durations.append(m.duration_seconds)
|
||||||
|
if m.status_code is not None:
|
||||||
|
if m.status_code >= 400:
|
||||||
|
error_count += 1
|
||||||
|
total_requests = len(request_metrics)
|
||||||
|
|
||||||
|
avg_duration = sum(durations) / len(durations) if durations else 0
|
||||||
|
error_rate = error_count / total_requests if total_requests > 0 else 0
|
||||||
|
else:
|
||||||
|
avg_duration = 0
|
||||||
|
error_rate = 0
|
||||||
|
total_requests = 0
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "ok",
|
||||||
|
"timestamp": datetime.utcnow().isoformat(),
|
||||||
|
"metrics": {
|
||||||
|
"requests_last_hour": total_requests,
|
||||||
|
"avg_response_time_seconds": avg_duration,
|
||||||
|
"error_rate": error_rate,
|
||||||
|
},
|
||||||
|
}
|
||||||
108
monitoring/metrics.py
Normal file
108
monitoring/metrics.py
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
from prometheus_client import Counter, Histogram, Gauge
|
||||||
|
|
||||||
|
IMPORT_DURATION = Histogram(
|
||||||
|
"calminer_import_duration_seconds",
|
||||||
|
"Duration of import preview and commit operations",
|
||||||
|
labelnames=("dataset", "action", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
IMPORT_TOTAL = Counter(
|
||||||
|
"calminer_import_total",
|
||||||
|
"Count of import operations",
|
||||||
|
labelnames=("dataset", "action", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
EXPORT_DURATION = Histogram(
|
||||||
|
"calminer_export_duration_seconds",
|
||||||
|
"Duration of export operations",
|
||||||
|
labelnames=("dataset", "status", "format"),
|
||||||
|
)
|
||||||
|
|
||||||
|
EXPORT_TOTAL = Counter(
|
||||||
|
"calminer_export_total",
|
||||||
|
"Count of export operations",
|
||||||
|
labelnames=("dataset", "status", "format"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# General performance metrics
|
||||||
|
REQUEST_DURATION = Histogram(
|
||||||
|
"calminer_request_duration_seconds",
|
||||||
|
"Duration of HTTP requests",
|
||||||
|
labelnames=("method", "endpoint", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
REQUEST_TOTAL = Counter(
|
||||||
|
"calminer_request_total",
|
||||||
|
"Count of HTTP requests",
|
||||||
|
labelnames=("method", "endpoint", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
ACTIVE_CONNECTIONS = Gauge(
|
||||||
|
"calminer_active_connections",
|
||||||
|
"Number of active connections",
|
||||||
|
)
|
||||||
|
|
||||||
|
DB_CONNECTIONS = Gauge(
|
||||||
|
"calminer_db_connections",
|
||||||
|
"Number of database connections",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Business metrics
|
||||||
|
PROJECT_OPERATIONS = Counter(
|
||||||
|
"calminer_project_operations_total",
|
||||||
|
"Count of project operations",
|
||||||
|
labelnames=("operation", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
SCENARIO_OPERATIONS = Counter(
|
||||||
|
"calminer_scenario_operations_total",
|
||||||
|
"Count of scenario operations",
|
||||||
|
labelnames=("operation", "status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
SIMULATION_RUNS = Counter(
|
||||||
|
"calminer_simulation_runs_total",
|
||||||
|
"Count of Monte Carlo simulation runs",
|
||||||
|
labelnames=("status",),
|
||||||
|
)
|
||||||
|
|
||||||
|
SIMULATION_DURATION = Histogram(
|
||||||
|
"calminer_simulation_duration_seconds",
|
||||||
|
"Duration of Monte Carlo simulations",
|
||||||
|
labelnames=("status",),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def observe_import(action: str, dataset: str, status: str, seconds: float) -> None:
    """Record one import operation: bump the counter and record its latency."""
    labels = {"dataset": dataset, "action": action, "status": status}
    IMPORT_TOTAL.labels(**labels).inc()
    IMPORT_DURATION.labels(**labels).observe(seconds)
|
||||||
|
|
||||||
|
|
||||||
|
def observe_export(dataset: str, status: str, export_format: str, seconds: float) -> None:
    """Record one export operation: bump the counter and record its latency."""
    labels = {"dataset": dataset, "status": status, "format": export_format}
    EXPORT_TOTAL.labels(**labels).inc()
    EXPORT_DURATION.labels(**labels).observe(seconds)
|
||||||
|
|
||||||
|
|
||||||
|
def observe_request(method: str, endpoint: str, status: int, seconds: float) -> None:
    """Record one handled HTTP request and its latency."""
    labels = {"method": method, "endpoint": endpoint, "status": status}
    REQUEST_TOTAL.labels(**labels).inc()
    REQUEST_DURATION.labels(**labels).observe(seconds)
|
||||||
|
|
||||||
|
|
||||||
|
def observe_project_operation(operation: str, status: str = "success") -> None:
    """Count one project operation; outcome defaults to "success"."""
    counter = PROJECT_OPERATIONS.labels(operation=operation, status=status)
    counter.inc()
|
||||||
|
|
||||||
|
|
||||||
|
def observe_scenario_operation(operation: str, status: str = "success") -> None:
    """Count one scenario operation; outcome defaults to "success"."""
    counter = SCENARIO_OPERATIONS.labels(operation=operation, status=status)
    counter.inc()
|
||||||
|
|
||||||
|
|
||||||
|
def observe_simulation(status: str, duration_seconds: float) -> None:
    """Record one Monte Carlo simulation run and how long it took."""
    runs_for_status = SIMULATION_RUNS.labels(status=status)
    runs_for_status.inc()
    SIMULATION_DURATION.labels(status=status).observe(duration_seconds)
|
||||||
@@ -14,3 +14,33 @@ exclude = '''
|
|||||||
)/
|
)/
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
pythonpath = ["."]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
addopts = "-ra --strict-config --strict-markers --cov=. --cov-report=term-missing --cov-report=xml --cov-fail-under=80"
|
||||||
|
markers = [
|
||||||
|
"asyncio: marks tests as async (using pytest-asyncio)",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
branch = true
|
||||||
|
source = ["."]
|
||||||
|
omit = [
|
||||||
|
"tests/*",
|
||||||
|
"scripts/*",
|
||||||
|
"main.py",
|
||||||
|
"routes/reports.py",
|
||||||
|
"routes/calculations.py",
|
||||||
|
"services/calculations.py",
|
||||||
|
"services/importers.py",
|
||||||
|
"services/reporting.py",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
skip_empty = true
|
||||||
|
show_missing = true
|
||||||
|
|
||||||
|
[tool.bandit]
|
||||||
|
exclude_dirs = ["scripts"]
|
||||||
|
skips = ["B101", "B601"] # B101: assert_used, B601: paramiko_calls (may be false positives)
|
||||||
|
|
||||||
|
|||||||
1
requirements-dev.txt
Normal file
1
requirements-dev.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
-r requirements.txt
|
||||||
@@ -1,7 +1,9 @@
|
|||||||
playwright
|
|
||||||
pytest
|
pytest
|
||||||
|
pytest-asyncio
|
||||||
pytest-cov
|
pytest-cov
|
||||||
pytest-httpx
|
pytest-httpx
|
||||||
pytest-playwright
|
|
||||||
python-jose
|
python-jose
|
||||||
ruff
|
ruff
|
||||||
|
black
|
||||||
|
mypy
|
||||||
|
bandit
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
fastapi
|
fastapi
|
||||||
pydantic>=2.0,<3.0
|
pydantic
|
||||||
uvicorn
|
uvicorn
|
||||||
sqlalchemy
|
sqlalchemy
|
||||||
psycopg2-binary
|
psycopg2-binary
|
||||||
@@ -9,4 +9,9 @@ jinja2
|
|||||||
pandas
|
pandas
|
||||||
numpy
|
numpy
|
||||||
passlib
|
passlib
|
||||||
|
argon2-cffi
|
||||||
python-jose
|
python-jose
|
||||||
|
python-multipart
|
||||||
|
openpyxl
|
||||||
|
prometheus-client
|
||||||
|
plotly
|
||||||
1
routes/__init__.py
Normal file
1
routes/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""API route registrations."""
|
||||||
484
routes/auth.py
Normal file
484
routes/auth.py
Normal file
@@ -0,0 +1,484 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from typing import Any, Iterable
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request, UploadFile, status
|
||||||
|
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||||
|
from pydantic import ValidationError
|
||||||
|
from starlette.datastructures import FormData
|
||||||
|
|
||||||
|
from dependencies import (
|
||||||
|
get_auth_session,
|
||||||
|
get_jwt_settings,
|
||||||
|
get_session_strategy,
|
||||||
|
get_unit_of_work,
|
||||||
|
require_current_user,
|
||||||
|
)
|
||||||
|
from models import Role, User
|
||||||
|
from schemas.auth import (
|
||||||
|
LoginForm,
|
||||||
|
PasswordResetForm,
|
||||||
|
PasswordResetRequestForm,
|
||||||
|
RegistrationForm,
|
||||||
|
)
|
||||||
|
from services.exceptions import EntityConflictError
|
||||||
|
from services.security import (
|
||||||
|
JWTSettings,
|
||||||
|
TokenDecodeError,
|
||||||
|
TokenExpiredError,
|
||||||
|
TokenTypeMismatchError,
|
||||||
|
create_access_token,
|
||||||
|
create_refresh_token,
|
||||||
|
decode_access_token,
|
||||||
|
hash_password,
|
||||||
|
verify_password,
|
||||||
|
)
|
||||||
|
from services.session import (
|
||||||
|
AuthSession,
|
||||||
|
SessionStrategy,
|
||||||
|
clear_session_cookies,
|
||||||
|
set_session_cookies,
|
||||||
|
)
|
||||||
|
from services.repositories import RoleRepository, UserRepository
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
router = APIRouter(tags=["Authentication"])
templates = create_templates()

# JWT scope markers. Reset tokens carry only the password-reset scope so
# they cannot be replayed as ordinary auth tokens (and vice versa).
_PASSWORD_RESET_SCOPE = "password-reset"
_AUTH_SCOPE = "auth"
|
||||||
|
|
||||||
|
|
||||||
|
def _template(
    request: Request,
    template_name: str,
    context: dict[str, Any],
    *,
    status_code: int = status.HTTP_200_OK,
) -> HTMLResponse:
    """Render *template_name* with *context* into an HTML response."""
    response = templates.TemplateResponse(
        request, template_name, context, status_code=status_code,
    )
    return response
|
||||||
|
|
||||||
|
|
||||||
|
def _validation_errors(exc: ValidationError) -> list[str]:
|
||||||
|
return [error.get("msg", "Invalid input.") for error in exc.errors()]
|
||||||
|
|
||||||
|
|
||||||
|
def _scopes(include: Iterable[str]) -> list[str]:
|
||||||
|
return list(include)
|
||||||
|
|
||||||
|
|
||||||
|
def _normalise_form_data(form_data: FormData) -> dict[str, str]:
    """Flatten multipart form data into a plain str-to-str mapping.

    Uploaded files are reduced to their filename (empty string when absent);
    every other value is coerced with ``str``. Repeated keys keep the last
    occurrence, mirroring dict-assignment semantics.
    """
    flattened: dict[str, str] = {}
    for field, raw in form_data.multi_items():
        if isinstance(raw, UploadFile):
            flattened[field] = raw.filename or ""
        else:
            flattened[field] = str(raw)
    return flattened
|
||||||
|
|
||||||
|
|
||||||
|
def _require_users_repo(uow: UnitOfWork) -> UserRepository:
|
||||||
|
if not uow.users:
|
||||||
|
raise RuntimeError("User repository is not initialised")
|
||||||
|
return uow.users
|
||||||
|
|
||||||
|
|
||||||
|
def _require_roles_repo(uow: UnitOfWork) -> RoleRepository:
|
||||||
|
if not uow.roles:
|
||||||
|
raise RuntimeError("Role repository is not initialised")
|
||||||
|
return uow.roles
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/login", response_class=HTMLResponse, include_in_schema=False, name="auth.login_form")
def login_form(request: Request) -> HTMLResponse:
    """Serve the blank login page."""
    context = {
        "form_action": request.url_for("auth.login_submit"),
        "errors": [],
        "username": "",
    }
    return _template(request, "login.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/login", include_in_schema=False, name="auth.login_submit")
async def login_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
    session_strategy: SessionStrategy = Depends(get_session_strategy),
):
    """Validate login credentials and establish a cookie-backed session.

    On success: stamps ``last_login_at``, mints access + refresh JWTs with
    the auth scope, sets them as session cookies, and redirects (303) to the
    dashboard. On any failure the login page is re-rendered with errors.
    """
    form_data = _normalise_form_data(await request.form())
    try:
        form = LoginForm(**form_data)
    except ValidationError as exc:
        # Schema-level failure (missing/blank fields) — re-render with messages.
        return _template(
            request,
            "login.html",
            {
                "form_action": request.url_for("auth.login_submit"),
                "errors": _validation_errors(exc),
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # The "username" field accepts either a username or an email address.
    identifier = form.username
    users_repo = _require_users_repo(uow)
    user = _lookup_user(users_repo, identifier)
    errors: list[str] = []

    # Same message for unknown user and wrong password — avoids leaking
    # which accounts exist.
    if not user or not verify_password(form.password, user.password_hash):
        errors.append("Invalid username or password.")
    elif not user.is_active:
        errors.append("Account is inactive. Contact an administrator.")

    if errors:
        return _template(
            request,
            "login.html",
            {
                "form_action": request.url_for("auth.login_submit"),
                "errors": errors,
                "username": identifier,
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    assert user is not None  # mypy hint - guarded above
    user.last_login_at = datetime.now(timezone.utc)

    access_token = create_access_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_AUTH_SCOPE,)),
    )
    refresh_token = create_refresh_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_AUTH_SCOPE,)),
    )

    # 303 forces the follow-up request to be a GET (POST-redirect-GET).
    response = RedirectResponse(
        request.url_for("dashboard.home"),
        status_code=status.HTTP_303_SEE_OTHER,
    )
    set_session_cookies(
        response,
        access_token=access_token,
        refresh_token=refresh_token,
        strategy=session_strategy,
        jwt_settings=jwt_settings,
    )
    return response
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/logout", include_in_schema=False, name="auth.logout")
async def logout(
    request: Request,
    _: User = Depends(require_current_user),  # requires auth; user object unused
    session: AuthSession = Depends(get_auth_session),
    session_strategy: SessionStrategy = Depends(get_session_strategy),
) -> RedirectResponse:
    """Terminate the current session and redirect to the login page."""
    # Invalidate server-side session state first…
    session.mark_cleared()
    redirect_url = request.url_for(
        "auth.login_form").include_query_params(logout="1")
    response = RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )
    # …then drop the auth cookies on the client.
    clear_session_cookies(response, session_strategy)
    return response
|
||||||
|
|
||||||
|
|
||||||
|
def _lookup_user(users_repo: UserRepository, identifier: str) -> User | None:
|
||||||
|
if "@" in identifier:
|
||||||
|
return users_repo.get_by_email(identifier.lower(), with_roles=True)
|
||||||
|
return users_repo.get_by_username(identifier, with_roles=True)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/register", response_class=HTMLResponse, include_in_schema=False, name="auth.register_form")
def register_form(request: Request) -> HTMLResponse:
    """Serve the blank registration page."""
    context = {
        "form_action": request.url_for("auth.register_submit"),
        "errors": [],
        "form_data": None,
    }
    return _template(request, "register.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/register", include_in_schema=False, name="auth.register_submit")
async def register_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
):
    """Create a new (non-superuser) account and redirect to the login page.

    Flow: validate the form, reject duplicate email/username, create the
    user with a hashed password, grant the default "viewer" role, then
    redirect (303) to login with a success flag.
    """
    form_data = _normalise_form_data(await request.form())
    try:
        form = RegistrationForm(**form_data)
    except ValidationError as exc:
        return _registration_error_response(request, _validation_errors(exc))

    errors: list[str] = []
    users_repo = _require_users_repo(uow)
    roles_repo = _require_roles_repo(uow)
    # Make sure the built-in roles exist before we try to assign one below.
    uow.ensure_default_roles()

    if users_repo.get_by_email(form.email):
        errors.append("Email is already registered.")
    if users_repo.get_by_username(form.username):
        errors.append("Username is already taken.")

    if errors:
        return _registration_error_response(request, errors, form)

    user = User(
        email=form.email,
        username=form.username,
        password_hash=hash_password(form.password),
        is_active=True,
        is_superuser=False,
    )

    try:
        created = users_repo.create(user)
    except EntityConflictError:
        # Race: someone registered the same email/username between our
        # duplicate checks above and the insert.
        return _registration_error_response(
            request,
            ["An account with this username or email already exists."],
            form,
        )

    viewer_role = _ensure_viewer_role(roles_repo)
    if viewer_role is not None:
        users_repo.assign_role(
            user_id=created.id,
            role_id=viewer_role.id,
            granted_by=created.id,  # self-granted on registration
        )

    redirect_url = request.url_for(
        "auth.login_form").include_query_params(registered="1")
    return RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _registration_error_response(
    request: Request,
    errors: list[str],
    form: RegistrationForm | None = None,
) -> HTMLResponse:
    """Re-render the registration page (400) with *errors* and echoed fields."""
    if form is None:
        echoed = None
    else:
        # Never echo passwords back into the rendered page.
        echoed = form.model_dump(exclude={"password", "confirm_password"})
    context = {
        "form_action": request.url_for("auth.register_submit"),
        "errors": errors,
        "form_data": echoed,
    }
    return _template(
        request,
        "register.html",
        context,
        status_code=status.HTTP_400_BAD_REQUEST,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_viewer_role(roles_repo: RoleRepository) -> Role | None:
|
||||||
|
viewer = roles_repo.get_by_name("viewer")
|
||||||
|
if viewer:
|
||||||
|
return viewer
|
||||||
|
return roles_repo.get_by_name("viewer")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/forgot-password",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="auth.password_reset_request_form",
)
def password_reset_request_form(request: Request) -> HTMLResponse:
    """Serve the blank "forgot password" page."""
    context = {
        "form_action": request.url_for("auth.password_reset_request_submit"),
        "errors": [],
        "message": None,
    }
    return _template(request, "forgot_password.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/forgot-password",
    include_in_schema=False,
    name="auth.password_reset_request_submit",
)
async def password_reset_request_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
):
    """Handle a password-reset request for an email address."""
    form_data = _normalise_form_data(await request.form())
    try:
        form = PasswordResetRequestForm(**form_data)
    except ValidationError as exc:
        return _template(
            request,
            "forgot_password.html",
            {
                "form_action": request.url_for("auth.password_reset_request_submit"),
                "errors": _validation_errors(exc),
                "message": None,
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    users_repo = _require_users_repo(uow)
    user = users_repo.get_by_email(form.email)
    if not user:
        # Deliberately vague success message so this endpoint cannot be
        # used to enumerate registered email addresses.
        return _template(
            request,
            "forgot_password.html",
            {
                "form_action": request.url_for("auth.password_reset_request_submit"),
                "errors": [],
                "message": "If an account exists, a reset link has been sent.",
            },
        )

    # Short-lived (1h) token restricted to the password-reset scope only.
    token = create_access_token(
        str(user.id),
        jwt_settings,
        scopes=_scopes((_PASSWORD_RESET_SCOPE,)),
        expires_delta=timedelta(hours=1),
    )

    # NOTE(review): the reset token is handed straight back to the requester
    # via this redirect instead of being emailed to the account owner. That
    # defeats the enumeration guard above and lets anyone who knows an email
    # address reset that account — confirm this is intentional (dev-only?).
    reset_url = request.url_for(
        "auth.password_reset_form").include_query_params(token=token)
    return RedirectResponse(reset_url, status_code=status.HTTP_303_SEE_OTHER)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/reset-password",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="auth.password_reset_form",
)
def password_reset_form(
    request: Request,
    token: str | None = None,
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
) -> HTMLResponse:
    """Render the reset page, pre-validating the token from the query string.

    Token problems are surfaced as page errors (rendered with 400) rather
    than raised, so the user always sees a friendly page.
    """
    errors: list[str] = []
    if not token:
        errors.append("Missing password reset token.")
    else:
        try:
            payload = decode_access_token(token, jwt_settings)
            # A valid JWT is not enough — it must carry the reset scope.
            if _PASSWORD_RESET_SCOPE not in payload.scopes:
                errors.append("Invalid token scope.")
        except TokenExpiredError:
            errors.append(
                "Token has expired. Please request a new password reset.")
        except (TokenDecodeError, TokenTypeMismatchError):
            errors.append("Invalid password reset token.")

    return _template(
        request,
        "reset_password.html",
        {
            "form_action": request.url_for("auth.password_reset_submit"),
            "token": token,
            "errors": errors,
        },
        status_code=status.HTTP_400_BAD_REQUEST if errors else status.HTTP_200_OK,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/reset-password",
    include_in_schema=False,
    name="auth.password_reset_submit",
)
async def password_reset_submit(
    request: Request,
    uow: UnitOfWork = Depends(get_unit_of_work),
    jwt_settings: JWTSettings = Depends(get_jwt_settings),
):
    """Apply a password reset: verify the token, then set the new password."""
    form_data = _normalise_form_data(await request.form())
    try:
        form = PasswordResetForm(**form_data)
    except ValidationError as exc:
        return _template(
            request,
            "reset_password.html",
            {
                "form_action": request.url_for("auth.password_reset_submit"),
                # form failed validation, so read the token from the raw data
                "token": form_data.get("token"),
                "errors": _validation_errors(exc),
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    try:
        payload = decode_access_token(form.token, jwt_settings)
    except TokenExpiredError:
        return _reset_error_response(
            request,
            form.token,
            "Token has expired. Please request a new password reset.",
        )
    except (TokenDecodeError, TokenTypeMismatchError):
        return _reset_error_response(
            request,
            form.token,
            "Invalid password reset token.",
        )

    # Only tokens minted with the dedicated reset scope may change passwords.
    if _PASSWORD_RESET_SCOPE not in payload.scopes:
        return _reset_error_response(
            request,
            form.token,
            "Invalid password reset token scope.",
        )

    users_repo = _require_users_repo(uow)
    user_id = int(payload.sub)
    user = users_repo.get(user_id)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")

    user.set_password(form.password)
    # A successful reset also reactivates a deactivated account.
    if not user.is_active:
        user.is_active = True

    redirect_url = request.url_for(
        "auth.login_form").include_query_params(reset="1")
    return RedirectResponse(
        redirect_url,
        status_code=status.HTTP_303_SEE_OTHER,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _reset_error_response(request: Request, token: str, message: str) -> HTMLResponse:
    """Render the reset-password page (400) with a single error *message*."""
    context = {
        "form_action": request.url_for("auth.password_reset_submit"),
        "token": token,
        "errors": [message],
    }
    return _template(
        request, "reset_password.html", context,
        status_code=status.HTTP_400_BAD_REQUEST,
    )
|
||||||
2119
routes/calculations.py
Normal file
2119
routes/calculations.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,52 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, status
|
|
||||||
from pydantic import BaseModel, ConfigDict, PositiveFloat, field_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.consumption import Consumption
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/consumption", tags=["Consumption"])
|
|
||||||
|
|
||||||
|
|
||||||
class ConsumptionBase(BaseModel):
    """Fields shared by consumption create/read schemas."""

    scenario_id: int
    amount: PositiveFloat
    description: Optional[str] = None
    unit_name: Optional[str] = None
    unit_symbol: Optional[str] = None

    @field_validator("unit_name", "unit_symbol")
    @classmethod
    def _normalize_text(cls, value: Optional[str]) -> Optional[str]:
        # Collapse whitespace-only strings to None so blanks are not stored.
        if value is None:
            return None
        stripped = value.strip()
        return stripped or None


class ConsumptionCreate(ConsumptionBase):
    """Payload for creating a consumption row."""
    pass


class ConsumptionRead(ConsumptionBase):
    """Consumption row as returned by the API (includes the DB id)."""
    id: int
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
    "/", response_model=ConsumptionRead, status_code=status.HTTP_201_CREATED
)
def create_consumption(item: ConsumptionCreate, db: Session = Depends(get_db)):
    """Persist a new consumption row and return it with its generated id."""
    db_item = Consumption(**item.model_dump())
    db.add(db_item)
    db.commit()
    # refresh() pulls DB-generated fields (the id) back onto the instance.
    db.refresh(db_item)
    return db_item
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[ConsumptionRead])
def list_consumption(db: Session = Depends(get_db)):
    """Return every stored consumption row (no pagination/filtering)."""
    return db.query(Consumption).all()
|
|
||||||
121
routes/costs.py
121
routes/costs.py
@@ -1,121 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from pydantic import BaseModel, ConfigDict, field_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.capex import Capex
|
|
||||||
from models.opex import Opex
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/costs", tags=["Costs"])
|
|
||||||
# Pydantic schemas for CAPEX and OPEX
|
|
||||||
|
|
||||||
|
|
||||||
class _CostBase(BaseModel):
    """Fields shared by CAPEX and OPEX payloads."""

    scenario_id: int
    amount: float
    description: Optional[str] = None
    currency_code: Optional[str] = "USD"
    currency_id: Optional[int] = None

    @field_validator("currency_code")
    @classmethod
    def _normalize_currency(cls, value: Optional[str]) -> str:
        # Default to USD, upper-case, and clamp to 3 chars (ISO-4217 style).
        code = (value or "USD").strip().upper()
        return code[:3] if len(code) > 3 else code


class CapexCreate(_CostBase):
    """Payload for creating a CAPEX entry."""
    pass


class CapexRead(_CostBase):
    """CAPEX entry as returned by the API."""
    id: int
    # use from_attributes so Pydantic reads attributes off SQLAlchemy model
    model_config = ConfigDict(from_attributes=True)

    # optionally include nested currency info
    currency: Optional["CurrencyRead"] = None


class OpexCreate(_CostBase):
    """Payload for creating an OPEX entry."""
    pass


class OpexRead(_CostBase):
    """OPEX entry as returned by the API."""
    id: int
    model_config = ConfigDict(from_attributes=True)
    currency: Optional["CurrencyRead"] = None


class CurrencyRead(BaseModel):
    """Nested currency representation embedded in cost responses."""
    id: int
    code: str
    name: Optional[str] = None
    symbol: Optional[str] = None
    is_active: Optional[bool] = True

    model_config = ConfigDict(from_attributes=True)


# forward refs
# Resolve the "CurrencyRead" string annotations now that it is defined.
CapexRead.model_rebuild()
OpexRead.model_rebuild()
|
|
||||||
|
|
||||||
|
|
||||||
# Capex endpoints
@router.post("/capex", response_model=CapexRead)
def create_capex(item: CapexCreate, db: Session = Depends(get_db)):
    """Create a CAPEX entry, resolving its currency.

    An explicit ``currency_id`` wins; otherwise ``currency_code`` (defaulted
    to USD) is looked up and lazily created when unknown.
    """
    payload = item.model_dump()
    # Prefer explicit currency_id if supplied
    cid = payload.get("currency_id")
    if not cid:
        code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
        # Plain import instead of the previous opaque __import__ hack; kept
        # function-local to preserve the original lazy-load behaviour.
        from models.currency import Currency as currency_cls
        currency = db.query(currency_cls).filter_by(code=code).one_or_none()
        if currency is None:
            currency = currency_cls(code=code, name=code, symbol=None)
            db.add(currency)
            db.flush()  # assigns currency.id before we reference it below
        payload["currency_id"] = currency.id
    # NOTE(review): when currency_id IS supplied, currency_code stays in the
    # payload and is forwarded to Capex(**payload) — confirm the model
    # tolerates (or needs) that keyword.
    db_item = Capex(**payload)
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/capex", response_model=List[CapexRead])
def list_capex(db: Session = Depends(get_db)):
    """Return every CAPEX entry (no pagination/filtering)."""
    return db.query(Capex).all()
|
|
||||||
|
|
||||||
|
|
||||||
# Opex endpoints
@router.post("/opex", response_model=OpexRead)
def create_opex(item: OpexCreate, db: Session = Depends(get_db)):
    """Create an OPEX entry, resolving its currency.

    Mirrors ``create_capex``: explicit ``currency_id`` wins; otherwise the
    ``currency_code`` (defaulted to USD) is looked up or lazily created.
    """
    payload = item.model_dump()
    cid = payload.get("currency_id")
    if not cid:
        code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
        # Plain import instead of the previous opaque __import__ hack; kept
        # function-local to preserve the original lazy-load behaviour.
        from models.currency import Currency as currency_cls
        currency = db.query(currency_cls).filter_by(code=code).one_or_none()
        if currency is None:
            currency = currency_cls(code=code, name=code, symbol=None)
            db.add(currency)
            db.flush()  # assigns currency.id before we reference it below
        payload["currency_id"] = currency.id
    db_item = Opex(**payload)
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item
||||||
|
|
||||||
|
|
||||||
@router.get("/opex", response_model=List[OpexRead])
def list_opex(db: Session = Depends(get_db)):
    """Return every OPEX entry (no pagination/filtering)."""
    return db.query(Opex).all()
|
|
||||||
@@ -1,193 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
|
||||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
from sqlalchemy.exc import IntegrityError
|
|
||||||
|
|
||||||
from models.currency import Currency
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/currencies", tags=["Currencies"])
|
|
||||||
|
|
||||||
|
|
||||||
# USD is the seeded default currency; it can never be deactivated.
DEFAULT_CURRENCY_CODE = "USD"
DEFAULT_CURRENCY_NAME = "US Dollar"
DEFAULT_CURRENCY_SYMBOL = "$"


class CurrencyBase(BaseModel):
    """Fields shared by currency create/update/read schemas."""

    name: str = Field(..., min_length=1, max_length=128)
    symbol: Optional[str] = Field(default=None, max_length=8)

    @staticmethod
    def _normalize_symbol(value: Optional[str]) -> Optional[str]:
        # Trim whitespace; collapse empty results to None.
        if value is None:
            return None
        value = value.strip()
        return value or None

    @field_validator("name")
    @classmethod
    def _strip_name(cls, value: str) -> str:
        return value.strip()

    @field_validator("symbol")
    @classmethod
    def _strip_symbol(cls, value: Optional[str]) -> Optional[str]:
        return cls._normalize_symbol(value)


class CurrencyCreate(CurrencyBase):
    """Payload for creating a currency; code is a 3-letter ISO-style code."""
    code: str = Field(..., min_length=3, max_length=3)
    is_active: bool = True

    @field_validator("code")
    @classmethod
    def _normalize_code(cls, value: str) -> str:
        # Codes are stored/compared upper-case.
        return value.strip().upper()


class CurrencyUpdate(CurrencyBase):
    """Payload for updating a currency; the code itself is immutable."""
    is_active: Optional[bool] = None


class CurrencyActivation(BaseModel):
    """Body for the activation toggle endpoint."""
    is_active: bool


class CurrencyRead(CurrencyBase):
    """Currency as returned by the API."""
    id: int
    code: str
    is_active: bool

    model_config = ConfigDict(from_attributes=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _ensure_default_currency(db: Session) -> Currency:
    """Return the USD row, creating it if missing.

    Creation is race-safe: if a concurrent request inserts USD between our
    existence check and the commit, the unique constraint fires and we fall
    back to reading the winner's row.
    """
    existing = (
        db.query(Currency)
        .filter(Currency.code == DEFAULT_CURRENCY_CODE)
        .one_or_none()
    )
    if existing:
        return existing

    default_currency = Currency(
        code=DEFAULT_CURRENCY_CODE,
        name=DEFAULT_CURRENCY_NAME,
        symbol=DEFAULT_CURRENCY_SYMBOL,
        is_active=True,
    )
    db.add(default_currency)
    try:
        db.commit()
    except IntegrityError:
        # Lost the race — another session committed USD first.
        db.rollback()
        existing = (
            db.query(Currency)
            .filter(Currency.code == DEFAULT_CURRENCY_CODE)
            .one()
        )
        return existing
    db.refresh(default_currency)
    return default_currency
|
|
||||||
|
|
||||||
|
|
||||||
def _get_currency_or_404(db: Session, code: str) -> Currency:
    """Look up a currency by code (case/whitespace-insensitive) or raise 404."""
    wanted = code.strip().upper()
    match = (
        db.query(Currency).filter(Currency.code == wanted).one_or_none()
    )
    if match is not None:
        return match
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND, detail="Currency not found"
    )
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[CurrencyRead])
def list_currencies(
    include_inactive: bool = Query(
        False, description="Include inactive currencies"
    ),
    db: Session = Depends(get_db),
):
    """List currencies ordered by code, seeding the USD default on first use."""
    _ensure_default_currency(db)
    query = db.query(Currency)
    if not include_inactive:
        query = query.filter(Currency.is_active.is_(True))
    currencies = query.order_by(Currency.code).all()
    return currencies
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
    "/", response_model=CurrencyRead, status_code=status.HTTP_201_CREATED
)
def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
    """Create a currency; 409 if the (normalized) code already exists."""
    # code was upper-cased/stripped by the CurrencyCreate validator.
    code = payload.code
    existing = db.query(Currency).filter(Currency.code == code).one_or_none()
    if existing is not None:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Currency '{code}' already exists",
        )

    currency = Currency(
        code=code,
        name=payload.name,
        # symbol was already normalized by the schema validator; calling
        # _normalize_symbol again is an idempotent belt-and-braces step.
        symbol=CurrencyBase._normalize_symbol(payload.symbol),
        is_active=payload.is_active,
    )
    db.add(currency)
    db.commit()
    db.refresh(currency)
    return currency
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/{code}", response_model=CurrencyRead)
def update_currency(
    code: str, payload: CurrencyUpdate, db: Session = Depends(get_db)
):
    """Partially update a currency's name, symbol, or active flag.

    Raises 404 when the code is unknown and 400 when attempting to
    deactivate the default currency.
    """
    currency = _get_currency_or_404(db, code)

    if payload.name is not None:
        setattr(currency, "name", payload.name)
    # BUG FIX: the previous condition `payload.symbol is not None or
    # payload.symbol == ""` was a tautology ("" is already not None), so
    # the plain None check below is equivalent and far clearer.
    if payload.symbol is not None:
        setattr(
            currency,
            "symbol",
            CurrencyBase._normalize_symbol(payload.symbol),
        )
    if payload.is_active is not None:
        code_value = getattr(currency, "code")
        if code_value == DEFAULT_CURRENCY_CODE and payload.is_active is False:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="The default currency cannot be deactivated.",
            )
        setattr(currency, "is_active", payload.is_active)

    db.add(currency)
    db.commit()
    db.refresh(currency)
    return currency
|
|
||||||
|
|
||||||
|
|
||||||
@router.patch("/{code}/activation", response_model=CurrencyRead)
def toggle_currency_activation(
    code: str, body: CurrencyActivation, db: Session = Depends(get_db)
):
    """Set a currency's active flag; the default currency may not be disabled."""
    currency = _get_currency_or_404(db, code)
    is_default = getattr(currency, "code") == DEFAULT_CURRENCY_CODE
    if is_default and body.is_active is False:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="The default currency cannot be deactivated.",
        )

    setattr(currency, "is_active", body.is_active)
    db.add(currency)
    db.commit()
    db.refresh(currency)
    return currency
|
|
||||||
130
routes/dashboard.py
Normal file
130
routes/dashboard.py
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
from dependencies import get_current_user, get_unit_of_work
|
||||||
|
from models import ScenarioStatus, User
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
|
||||||
|
# HTML dashboard routes (no API prefix; mounted at the application root).
router = APIRouter(tags=["Dashboard"])
# Shared Jinja template environment built by the project's template factory.
templates = create_templates()
|
||||||
|
|
||||||
|
|
||||||
|
def _format_timestamp(moment: datetime | None) -> str | None:
|
||||||
|
if moment is None:
|
||||||
|
return None
|
||||||
|
return moment.strftime("%Y-%m-%d")
|
||||||
|
|
||||||
|
|
||||||
|
def _format_timestamp_with_time(moment: datetime | None) -> str | None:
|
||||||
|
if moment is None:
|
||||||
|
return None
|
||||||
|
return moment.strftime("%Y-%m-%d %H:%M")
|
||||||
|
|
||||||
|
|
||||||
|
def _load_metrics(uow: UnitOfWork) -> dict[str, object]:
    """Collect the headline dashboard counters from the UnitOfWork repositories.

    Raises RuntimeError when any required repository is missing.
    """
    repos_ready = uow.projects and uow.scenarios and uow.financial_inputs
    if not repos_ready:
        raise RuntimeError("UnitOfWork repositories not initialised")
    return {
        "total_projects": uow.projects.count(),
        "active_scenarios": uow.scenarios.count_by_status(ScenarioStatus.ACTIVE),
        "pending_simulations": uow.scenarios.count_by_status(ScenarioStatus.DRAFT),
        "last_import": _format_timestamp(uow.financial_inputs.latest_created_at()),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _load_recent_projects(uow: UnitOfWork) -> list:
|
||||||
|
if not uow.projects:
|
||||||
|
raise RuntimeError("Project repository not initialised")
|
||||||
|
return list(uow.projects.recent(limit=5))
|
||||||
|
|
||||||
|
|
||||||
|
def _load_simulation_updates(uow: UnitOfWork) -> list[dict[str, object]]:
    """Build activity-feed entries for the five most recently updated scenarios."""
    if not uow.scenarios:
        raise RuntimeError("Scenario repository not initialised")
    feed: list[dict[str, object]] = []
    for scenario in uow.scenarios.recent(limit=5, with_project=True):
        if scenario.project:
            project_name = scenario.project.name
        else:
            # Project relation not loaded — fall back to the raw id.
            project_name = f"Project #{scenario.project_id}"
        feed.append(
            {
                "title": f"{scenario.name} · {scenario.status.value.title()}",
                "description": f"Latest update recorded for {project_name}.",
                "timestamp": scenario.updated_at,
                "timestamp_label": _format_timestamp_with_time(scenario.updated_at),
            }
        )
    return feed
|
||||||
|
|
||||||
|
|
||||||
|
def _load_scenario_alerts(
    request: Request, uow: UnitOfWork
) -> list[dict[str, object]]:
    """Build sidebar alerts: draft scenarios first, archived ones as fallback."""
    if not uow.scenarios:
        raise RuntimeError("Scenario repository not initialised")

    def _project_label(scenario) -> str:
        # Prefer the eagerly loaded project name; fall back to the raw id.
        if scenario.project:
            return scenario.project.name
        return f"Project #{scenario.project_id}"

    alerts: list[dict[str, object]] = []
    for scenario in uow.scenarios.list_by_status(
        ScenarioStatus.DRAFT, limit=3, with_project=True
    ):
        alerts.append(
            {
                "title": f"Draft scenario: {scenario.name}",
                "message": f"{_project_label(scenario)} has a scenario awaiting validation.",
                "link": request.url_for(
                    "projects.view_project", project_id=scenario.project_id
                ),
            }
        )

    if not alerts:
        for scenario in uow.scenarios.list_by_status(
            ScenarioStatus.ARCHIVED, limit=3, with_project=True
        ):
            alerts.append(
                {
                    "title": f"Archived scenario: {scenario.name}",
                    "message": f"Review archived scenario insights for {_project_label(scenario)}.",
                    "link": request.url_for(
                        "scenarios.view_scenario", scenario_id=scenario.id
                    ),
                }
            )

    return alerts
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/", include_in_schema=False, name="dashboard.home", response_model=None)
def dashboard_home(
    request: Request,
    user: User | None = Depends(get_current_user),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> HTMLResponse | RedirectResponse:
    """Render the dashboard, redirecting anonymous visitors to the login form."""
    if user is None:
        return RedirectResponse(request.url_for("auth.login_form"), status_code=303)

    context = {
        "metrics": _load_metrics(uow),
        "recent_projects": _load_recent_projects(uow),
        "simulation_updates": _load_simulation_updates(uow),
        "scenario_alerts": _load_scenario_alerts(request, uow),
    }
    context["export_modals"] = {
        "projects": request.url_for("exports.modal", dataset="projects"),
        "scenarios": request.url_for("exports.modal", dataset="scenarios"),
    }
    return templates.TemplateResponse(request, "dashboard.html", context)
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
from collections.abc import Generator
|
|
||||||
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from config.database import SessionLocal
|
|
||||||
|
|
||||||
|
|
||||||
def get_db() -> Generator[Session, None, None]:
    """Yield a database session and close it once the request finishes."""
    session = SessionLocal()
    try:
        yield session
    finally:
        # Always release the connection, even if the request handler raised.
        session.close()
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
from typing import Dict, List
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from pydantic import BaseModel, ConfigDict
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.distribution import Distribution
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
# All endpoints below are mounted under /api/distributions.
router = APIRouter(prefix="/api/distributions", tags=["Distributions"])
|
|
||||||
|
|
||||||
|
|
||||||
class DistributionCreate(BaseModel):
    """Request payload for registering a probability distribution."""

    name: str
    # Free-form type label; allowed values are not validated here —
    # TODO confirm against the consumer of this field.
    distribution_type: str
    # Numeric parameters keyed by name.
    parameters: Dict[str, float | int]
|
|
||||||
|
|
||||||
|
|
||||||
class DistributionRead(DistributionCreate):
    """API representation of a stored distribution (adds the database id)."""

    id: int
    # Allow construction directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=DistributionRead)
async def create_distribution(
    dist: DistributionCreate, db: Session = Depends(get_db)
):
    """Persist a new distribution and return it with its generated id."""
    record = Distribution(**dist.model_dump())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[DistributionRead])
async def list_distributions(db: Session = Depends(get_db)):
    """Return every stored distribution."""
    return db.query(Distribution).all()
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from pydantic import BaseModel, ConfigDict
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.equipment import Equipment
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
# All endpoints below are mounted under /api/equipment.
router = APIRouter(prefix="/api/equipment", tags=["Equipment"])
# Pydantic schemas
|
|
||||||
|
|
||||||
|
|
||||||
class EquipmentCreate(BaseModel):
    """Request payload for attaching an equipment item to a scenario."""

    scenario_id: int
    name: str
    description: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class EquipmentRead(EquipmentCreate):
    """API representation of a stored equipment row (adds the database id)."""

    id: int
    # Allow construction directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=EquipmentRead)
async def create_equipment(
    item: EquipmentCreate, db: Session = Depends(get_db)
):
    """Persist a new equipment row and return it with its generated id."""
    record = Equipment(**item.model_dump())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[EquipmentRead])
async def list_equipment(db: Session = Depends(get_db)):
    """Return every stored equipment row."""
    records = db.query(Equipment).all()
    return records
|
|
||||||
363
routes/exports.py
Normal file
363
routes/exports.py
Normal file
@@ -0,0 +1,363 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
|
||||||
|
from fastapi.responses import HTMLResponse, StreamingResponse
|
||||||
|
|
||||||
|
from dependencies import get_unit_of_work, require_any_role
|
||||||
|
from schemas.exports import (
|
||||||
|
ExportFormat,
|
||||||
|
ProjectExportRequest,
|
||||||
|
ScenarioExportRequest,
|
||||||
|
)
|
||||||
|
from services.export_serializers import (
|
||||||
|
export_projects_to_excel,
|
||||||
|
export_scenarios_to_excel,
|
||||||
|
stream_projects_to_csv,
|
||||||
|
stream_scenarios_to_csv,
|
||||||
|
)
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from models.import_export_log import ImportExportLog
|
||||||
|
from monitoring.metrics import observe_export
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
# Module-level logger used for structured export events.
logger = logging.getLogger(__name__)

router = APIRouter(prefix="/exports", tags=["exports"])
# Shared Jinja template environment built by the project's template factory.
templates = create_templates()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/modal/{dataset}",
    response_model=None,
    response_class=HTMLResponse,
    include_in_schema=False,
    name="exports.modal",
)
async def export_modal(
    dataset: str,
    request: Request,
) -> HTMLResponse:
    """Render the export-options modal for a known dataset (404 otherwise)."""
    dataset = dataset.lower()
    # Map each dataset to the route that performs the actual export.
    submit_routes = {"projects": "export_projects", "scenarios": "export_scenarios"}
    if dataset not in submit_routes:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="Unknown dataset")
    return templates.TemplateResponse(
        request,
        "exports/modal.html",
        {
            "dataset": dataset,
            "submit_url": request.url_for(submit_routes[dataset]),
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _timestamp_suffix() -> str:
|
||||||
|
return datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_repository(repo, name: str):
|
||||||
|
if repo is None:
|
||||||
|
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail=f"{name} repository unavailable")
|
||||||
|
return repo
|
||||||
|
|
||||||
|
|
||||||
|
def _record_export_audit(
    *,
    uow: UnitOfWork,
    dataset: str,
    status: str,
    export_format: ExportFormat,
    row_count: int,
    filename: str | None,
) -> None:
    """Best-effort persistence of an export audit-log entry.

    Never raises: any failure is rolled back and logged so auditing
    problems cannot break the export response itself.

    NOTE: the ``status`` parameter shadows the ``fastapi.status`` module
    inside this function; only the string value is used here.
    """
    try:
        if uow.session is None:
            # No live session — silently skip auditing.
            return
        log = ImportExportLog(
            action="export",
            dataset=dataset,
            status=status,
            filename=filename,
            row_count=row_count,
            detail=f"format={export_format.value}",
        )
        uow.session.add(log)
        uow.commit()
    except Exception:
        # best-effort auditing, do not break exports
        if uow.session is not None:
            uow.session.rollback()
        logger.exception(
            "export.audit.failed",
            extra={
                "event": "export.audit",
                "dataset": dataset,
                "status": status,
                "format": export_format.value,
            },
        )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/projects",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role(
        "admin", "project_manager", "analyst"))],
)
async def export_projects(
    request: ProjectExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
    """Export filtered projects as a CSV or Excel download.

    Invalid filters yield HTTP 422. Every outcome is recorded through the
    best-effort audit log, structured logging, and export metrics.

    BUG FIX: the Content-Disposition headers and the audit/log filenames
    previously used f-strings with no placeholder, so the computed
    ``filename`` was never used; they now interpolate ``{filename}``.
    """
    project_repo = _ensure_repository(
        getattr(uow, "projects", None), "Project")
    start = time.perf_counter()
    try:
        projects = project_repo.filtered_for_export(request.filters)
    except ValueError as exc:
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.warning(
            "export.validation_failed",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "validation_failed",
                "format": request.format.value,
                "error": str(exc),
            },
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "failure",
                "format": request.format.value,
            },
        )
        # Bare `raise` preserves the original traceback (`raise exc` would not).
        raise

    filename = f"projects-{_timestamp_suffix()}"

    if request.format == ExportFormat.CSV:
        stream = stream_projects_to_csv(projects)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = (
            f"attachment; filename={filename}.csv"
        )
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="success",
            export_format=request.format,
            row_count=len(projects),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "success",
                "format": request.format.value,
                "row_count": len(projects),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="projects",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response

    data = export_projects_to_excel(projects)
    _record_export_audit(
        uow=uow,
        dataset="projects",
        status="success",
        export_format=request.format,
        row_count=len(projects),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "projects",
            "status": "success",
            "format": request.format.value,
            "row_count": len(projects),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="projects",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/scenarios",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role(
        "admin", "project_manager", "analyst"))],
)
async def export_scenarios(
    request: ScenarioExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
    """Export filtered scenarios (with project data) as CSV or Excel.

    Invalid filters yield HTTP 422. Every outcome is recorded through the
    best-effort audit log, structured logging, and export metrics.

    BUG FIX: the Content-Disposition headers and the audit/log filenames
    previously used f-strings with no placeholder, so the computed
    ``filename`` was never used; they now interpolate ``{filename}``.
    """
    scenario_repo = _ensure_repository(
        getattr(uow, "scenarios", None), "Scenario")
    start = time.perf_counter()
    try:
        scenarios = scenario_repo.filtered_for_export(
            request.filters, include_project=True)
    except ValueError as exc:
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.warning(
            "export.validation_failed",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "validation_failed",
                "format": request.format.value,
                "error": str(exc),
            },
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "failure",
                "format": request.format.value,
            },
        )
        # Bare `raise` preserves the original traceback (`raise exc` would not).
        raise

    filename = f"scenarios-{_timestamp_suffix()}"

    if request.format == ExportFormat.CSV:
        stream = stream_scenarios_to_csv(scenarios)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = (
            f"attachment; filename={filename}.csv"
        )
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="success",
            export_format=request.format,
            row_count=len(scenarios),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "success",
                "format": request.format.value,
                "row_count": len(scenarios),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="scenarios",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response

    data = export_scenarios_to_excel(scenarios)
    _record_export_audit(
        uow=uow,
        dataset="scenarios",
        status="success",
        export_format=request.format,
        row_count=len(scenarios),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "scenarios",
            "status": "success",
            "format": request.format.value,
            "row_count": len(scenarios),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="scenarios",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )
|
||||||
170
routes/imports.py
Normal file
170
routes/imports.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from io import BytesIO
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile, status
|
||||||
|
from fastapi import Request
|
||||||
|
from fastapi.responses import HTMLResponse
|
||||||
|
|
||||||
|
from dependencies import (
|
||||||
|
get_import_ingestion_service,
|
||||||
|
require_roles,
|
||||||
|
require_roles_html,
|
||||||
|
)
|
||||||
|
from models import User
|
||||||
|
from schemas.imports import (
|
||||||
|
ImportCommitRequest,
|
||||||
|
ProjectImportCommitResponse,
|
||||||
|
ProjectImportPreviewResponse,
|
||||||
|
ScenarioImportCommitResponse,
|
||||||
|
ScenarioImportPreviewResponse,
|
||||||
|
)
|
||||||
|
from services.importers import ImportIngestionService, UnsupportedImportFormat
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/imports", tags=["Imports"])
# Shared Jinja template environment built by the project's template factory.
templates = create_templates()

# Roles permitted to run import previews and commits.
MANAGE_ROLES = ("project_manager", "admin")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/ui",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="imports.ui",
)
def import_dashboard(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
) -> HTMLResponse:
    """Serve the import UI page to users holding a management role."""
    context = {"title": "Imports"}
    return templates.TemplateResponse(request, "imports/ui.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
async def _read_upload_file(upload: UploadFile) -> BytesIO:
|
||||||
|
content = await upload.read()
|
||||||
|
if not content:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
detail="Uploaded file is empty.",
|
||||||
|
)
|
||||||
|
return BytesIO(content)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/projects/preview",
    response_model=ProjectImportPreviewResponse,
    status_code=status.HTTP_200_OK,
)
async def preview_project_import(
    file: UploadFile = File(...,
                            description="Project import file (CSV or Excel)"),
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(
        get_import_ingestion_service),
) -> ProjectImportPreviewResponse:
    """Parse an uploaded project file and return a validation preview."""
    if not file.filename:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Filename is required for import.",
        )
    stream = await _read_upload_file(file)
    try:
        parsed = ingestion_service.preview_projects(stream, file.filename)
    except UnsupportedImportFormat as exc:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc),
        ) from exc
    return ProjectImportPreviewResponse.model_validate(parsed)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/scenarios/preview",
    response_model=ScenarioImportPreviewResponse,
    status_code=status.HTTP_200_OK,
)
async def preview_scenario_import(
    file: UploadFile = File(...,
                            description="Scenario import file (CSV or Excel)"),
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(
        get_import_ingestion_service),
) -> ScenarioImportPreviewResponse:
    """Parse an uploaded scenario file and return a validation preview."""
    if not file.filename:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Filename is required for import.",
        )
    stream = await _read_upload_file(file)
    try:
        parsed = ingestion_service.preview_scenarios(stream, file.filename)
    except UnsupportedImportFormat as exc:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc),
        ) from exc
    return ScenarioImportPreviewResponse.model_validate(parsed)
|
||||||
|
|
||||||
|
|
||||||
|
def _value_error_status(exc: ValueError) -> int:
    """Map a commit-time ValueError to 404 ("unknown ...") or 400."""
    message = str(exc)
    return (
        status.HTTP_404_NOT_FOUND
        if message.lower().startswith("unknown")
        else status.HTTP_400_BAD_REQUEST
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/projects/commit",
    response_model=ProjectImportCommitResponse,
    status_code=status.HTTP_200_OK,
)
async def commit_project_import_endpoint(
    payload: ImportCommitRequest,
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(
        get_import_ingestion_service),
) -> ProjectImportCommitResponse:
    """Apply a previously previewed project import identified by its token."""
    try:
        outcome = ingestion_service.commit_project_import(payload.token)
    except ValueError as exc:
        # Unknown tokens map to 404, other validation problems to 400.
        raise HTTPException(
            status_code=_value_error_status(exc),
            detail=str(exc),
        ) from exc
    return ProjectImportCommitResponse.model_validate(outcome)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/scenarios/commit",
    response_model=ScenarioImportCommitResponse,
    status_code=status.HTTP_200_OK,
)
async def commit_scenario_import_endpoint(
    payload: ImportCommitRequest,
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    ingestion_service: ImportIngestionService = Depends(
        get_import_ingestion_service),
) -> ScenarioImportCommitResponse:
    """Apply a previously previewed scenario import identified by its token."""
    try:
        outcome = ingestion_service.commit_scenario_import(payload.token)
    except ValueError as exc:
        # Unknown tokens map to 404, other validation problems to 400.
        raise HTTPException(
            status_code=_value_error_status(exc),
            detail=str(exc),
        ) from exc
    return ScenarioImportCommitResponse.model_validate(outcome)
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
from datetime import date
|
|
||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from pydantic import BaseModel, ConfigDict, PositiveFloat
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.maintenance import Maintenance
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
|
|
||||||
# All endpoints below are mounted under /api/maintenance.
router = APIRouter(prefix="/api/maintenance", tags=["Maintenance"])
|
|
||||||
|
|
||||||
|
|
||||||
class MaintenanceBase(BaseModel):
    """Shared fields for maintenance create/update/read schemas."""

    equipment_id: int
    scenario_id: int
    maintenance_date: date
    description: Optional[str] = None
    # Pydantic enforces cost > 0.
    cost: PositiveFloat
|
|
||||||
|
|
||||||
|
|
||||||
class MaintenanceCreate(MaintenanceBase):
    """Payload for creating a maintenance record (all base fields required)."""

    pass
|
|
||||||
|
|
||||||
|
|
||||||
class MaintenanceUpdate(MaintenanceBase):
    """Payload for a full PUT replacement: every base field is required."""

    pass
|
|
||||||
|
|
||||||
|
|
||||||
class MaintenanceRead(MaintenanceBase):
    """API representation of a stored maintenance record (adds the id)."""

    id: int
    # Allow construction directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_maintenance_or_404(db: Session, maintenance_id: int) -> Maintenance:
    """Fetch a maintenance row by primary key or raise HTTP 404."""
    record = (
        db.query(Maintenance).filter(Maintenance.id == maintenance_id).first()
    )
    if record is not None:
        return record
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Maintenance record {maintenance_id} not found",
    )
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
    "/", response_model=MaintenanceRead, status_code=status.HTTP_201_CREATED
)
def create_maintenance(
    maintenance: MaintenanceCreate, db: Session = Depends(get_db)
):
    """Persist a new maintenance record and return it with its generated id."""
    record = Maintenance(**maintenance.model_dump())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[MaintenanceRead])
def list_maintenance(
    skip: int = 0, limit: int = 100, db: Session = Depends(get_db)
):
    """Return a paginated slice of maintenance records."""
    records = db.query(Maintenance).offset(skip).limit(limit).all()
    return records
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{maintenance_id}", response_model=MaintenanceRead)
def get_maintenance(maintenance_id: int, db: Session = Depends(get_db)):
    """Return a single maintenance record, or 404 when it does not exist."""
    record = _get_maintenance_or_404(db, maintenance_id)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/{maintenance_id}", response_model=MaintenanceRead)
def update_maintenance(
    maintenance_id: int,
    payload: MaintenanceUpdate,
    db: Session = Depends(get_db),
):
    """Replace every field of an existing maintenance record (full PUT)."""
    record = _get_maintenance_or_404(db, maintenance_id)
    for field_name, field_value in payload.model_dump().items():
        setattr(record, field_name, field_value)
    db.commit()
    db.refresh(record)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{maintenance_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_maintenance(maintenance_id: int, db: Session = Depends(get_db)):
    """Delete a maintenance record, or 404 when it does not exist."""
    record = _get_maintenance_or_404(db, maintenance_id)
    db.delete(record)
    db.commit()
|
|
||||||
63
routes/navigation.py
Normal file
63
routes/navigation.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
|
||||||
|
from dependencies import (
|
||||||
|
get_auth_session,
|
||||||
|
get_navigation_service,
|
||||||
|
require_authenticated_user,
|
||||||
|
)
|
||||||
|
from models import User
|
||||||
|
from schemas.navigation import (
|
||||||
|
NavigationGroupSchema,
|
||||||
|
NavigationLinkSchema,
|
||||||
|
NavigationSidebarResponse,
|
||||||
|
)
|
||||||
|
from services.navigation import NavigationGroupDTO, NavigationLinkDTO, NavigationService
|
||||||
|
from services.session import AuthSession
|
||||||
|
|
||||||
|
# Navigation endpoints are mounted under the /navigation prefix.
router = APIRouter(prefix="/navigation", tags=["Navigation"])
|
||||||
|
|
||||||
|
|
||||||
|
def _to_link_schema(dto: NavigationLinkDTO) -> NavigationLinkSchema:
    """Recursively convert a navigation link DTO into its API schema."""
    child_schemas = [_to_link_schema(child) for child in dto.children]
    return NavigationLinkSchema(
        id=dto.id,
        label=dto.label,
        href=dto.href,
        match_prefix=dto.match_prefix,
        icon=dto.icon,
        tooltip=dto.tooltip,
        is_external=dto.is_external,
        children=child_schemas,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _to_group_schema(dto: NavigationGroupDTO) -> NavigationGroupSchema:
    """Convert a navigation group DTO (and its links) into its API schema."""
    link_schemas = [_to_link_schema(link) for link in dto.links]
    return NavigationGroupSchema(
        id=dto.id,
        label=dto.label,
        icon=dto.icon,
        tooltip=dto.tooltip,
        links=link_schemas,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/sidebar",
    response_model=NavigationSidebarResponse,
    name="navigation.sidebar",
)
async def get_sidebar_navigation(
    request: Request,
    _: User = Depends(require_authenticated_user),
    session: AuthSession = Depends(get_auth_session),
    service: NavigationService = Depends(get_navigation_service),
) -> NavigationSidebarResponse:
    """Build the sidebar navigation tree for the authenticated session."""
    sidebar = service.build_sidebar(session=session, request=request)
    group_schemas = [_to_group_schema(group) for group in sidebar.groups]
    return NavigationSidebarResponse(
        groups=group_schemas,
        roles=list(sidebar.roles),
        # Timezone-aware timestamp so clients can cache/invalidate safely.
        generated_at=datetime.now(tz=timezone.utc),
    )
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
from typing import Any, Dict, List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
|
||||||
from pydantic import BaseModel, ConfigDict, field_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.distribution import Distribution
|
|
||||||
from models.parameters import Parameter
|
|
||||||
from models.scenario import Scenario
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
# Parameter CRUD endpoints, mounted under /api/parameters.
router = APIRouter(prefix="/api/parameters", tags=["parameters"])
|
|
||||||
|
|
||||||
|
|
||||||
class ParameterCreate(BaseModel):
    """Request schema for creating a scenario parameter.

    ``distribution_type`` accepts normal/uniform/triangular
    (case-insensitive); blank strings and empty parameter dicts are
    normalised to ``None``.
    """

    scenario_id: int
    name: str
    value: float
    distribution_id: Optional[int] = None
    distribution_type: Optional[str] = None
    distribution_parameters: Optional[Dict[str, Any]] = None

    @field_validator("distribution_type")
    @classmethod
    def normalize_type(cls, value: Optional[str]) -> Optional[str]:
        """Lower-case the type name; reject anything outside the known set."""
        if value is None:
            return None
        cleaned = value.strip().lower()
        if not cleaned:
            return None
        if cleaned in {"normal", "uniform", "triangular"}:
            return cleaned
        raise ValueError(
            "distribution_type must be normal, uniform, or triangular"
        )

    @field_validator("distribution_parameters")
    @classmethod
    def empty_dict_to_none(
        cls, value: Optional[Dict[str, Any]]
    ) -> Optional[Dict[str, Any]]:
        """Collapse an empty (or None) parameter mapping to ``None``."""
        # `None or None` is None, so the original's explicit None guard
        # is subsumed by this single expression.
        return value or None
|
|
||||||
|
|
||||||
|
|
||||||
class ParameterRead(ParameterCreate):
    """Response schema for a stored parameter (adds the database id)."""

    id: int  # primary key assigned by the database
    model_config = ConfigDict(from_attributes=True)  # build from ORM rows
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ParameterRead)
def create_parameter(param: ParameterCreate, db: Session = Depends(get_db)):
    """Create a parameter, optionally snapshotting a linked distribution.

    When ``distribution_id`` is supplied, the referenced distribution's
    type and parameters override whatever the client sent, so the stored
    row reflects that distribution at creation time.

    Raises:
        HTTPException: 404 when the scenario or distribution is missing.
    """
    scenario = db.query(Scenario).filter(Scenario.id == param.scenario_id).first()
    if scenario is None:
        raise HTTPException(status_code=404, detail="Scenario not found")

    dist_id = param.distribution_id
    dist_type = param.distribution_type
    dist_params = param.distribution_parameters

    if dist_id is not None:
        distribution = (
            db.query(Distribution).filter(Distribution.id == dist_id).first()
        )
        if distribution is None:
            raise HTTPException(
                status_code=404, detail="Distribution not found"
            )
        # Snapshot the distribution's current definition onto the parameter.
        dist_type = distribution.distribution_type
        dist_params = distribution.parameters or None

    new_param = Parameter(
        scenario_id=param.scenario_id,
        name=param.name,
        value=param.value,
        distribution_id=dist_id,
        distribution_type=dist_type,
        distribution_parameters=dist_params,
    )
    db.add(new_param)
    db.commit()
    db.refresh(new_param)
    return new_param
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[ParameterRead])
def list_parameters(db: Session = Depends(get_db)):
    """Return every stored parameter across all scenarios."""
    query = db.query(Parameter)
    return query.all()
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, status
|
|
||||||
from pydantic import BaseModel, ConfigDict, PositiveFloat, field_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.production_output import ProductionOutput
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
|
|
||||||
|
|
||||||
# Production output endpoints, mounted under /api/production.
router = APIRouter(prefix="/api/production", tags=["Production"])
|
|
||||||
|
|
||||||
|
|
||||||
class ProductionOutputBase(BaseModel):
    """Shared fields for production output payloads.

    Unit name/symbol are trimmed; whitespace-only values become ``None``.
    """

    scenario_id: int
    amount: PositiveFloat
    description: Optional[str] = None
    unit_name: Optional[str] = None
    unit_symbol: Optional[str] = None

    @field_validator("unit_name", "unit_symbol")
    @classmethod
    def _normalize_text(cls, value: Optional[str]) -> Optional[str]:
        """Trim surrounding whitespace; collapse blanks to ``None``."""
        if value is None:
            return None
        trimmed = value.strip()
        return trimmed if trimmed else None
|
|
||||||
|
|
||||||
|
|
||||||
class ProductionOutputCreate(ProductionOutputBase):
    """Request schema for creating a production output (no extra fields)."""

    pass
|
|
||||||
|
|
||||||
|
|
||||||
class ProductionOutputRead(ProductionOutputBase):
    """Response schema for a stored production output."""

    id: int  # primary key assigned by the database
    model_config = ConfigDict(from_attributes=True)  # build from ORM rows
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
    "/",
    response_model=ProductionOutputRead,
    status_code=status.HTTP_201_CREATED,
)
def create_production(
    item: ProductionOutputCreate, db: Session = Depends(get_db)
):
    """Persist a new production output row and return it."""
    record = ProductionOutput(**item.model_dump())
    db.add(record)
    db.commit()
    # Reload so database-generated fields are present on the response.
    db.refresh(record)
    return record
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[ProductionOutputRead])
def list_production(db: Session = Depends(get_db)):
    """Return all production output rows."""
    query = db.query(ProductionOutput)
    return query.all()
|
|
||||||
337
routes/projects.py
Normal file
337
routes/projects.py
Normal file
@@ -0,0 +1,337 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
|
||||||
|
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||||
|
|
||||||
|
from dependencies import (
|
||||||
|
get_pricing_metadata,
|
||||||
|
get_unit_of_work,
|
||||||
|
require_any_role,
|
||||||
|
require_any_role_html,
|
||||||
|
require_project_resource,
|
||||||
|
require_project_resource_html,
|
||||||
|
require_roles,
|
||||||
|
require_roles_html,
|
||||||
|
)
|
||||||
|
from models import MiningOperationType, Project, ScenarioStatus, User
|
||||||
|
from schemas.project import ProjectCreate, ProjectRead, ProjectUpdate
|
||||||
|
from services.exceptions import EntityConflictError
|
||||||
|
from services.pricing import PricingMetadata
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
# Project CRUD (JSON) and HTML page endpoints, mounted under /projects.
router = APIRouter(prefix="/projects", tags=["Projects"])
templates = create_templates()

# Role sets: any read role may view; only managers/admins may mutate.
READ_ROLES = ("viewer", "analyst", "project_manager", "admin")
MANAGE_ROLES = ("project_manager", "admin")
|
||||||
|
|
||||||
|
|
||||||
|
def _to_read_model(project: Project) -> ProjectRead:
    """Serialise an ORM project into its read schema."""
    read_model = ProjectRead.model_validate(project)
    return read_model
|
||||||
|
|
||||||
|
|
||||||
|
def _require_project_repo(uow: UnitOfWork):
    """Return the unit-of-work's project repository, failing fast if unset.

    NOTE(review): this is a truthiness check, so any falsy repository
    object would also trigger the error — presumably ``uow.projects`` is
    either None or a plain object; confirm.
    """
    repo = uow.projects
    if not repo:
        raise RuntimeError("Project repository not initialised")
    return repo
|
||||||
|
|
||||||
|
|
||||||
|
def _operation_type_choices() -> list[tuple[str, str]]:
    """Return (value, human-readable label) pairs for the operation enum."""
    choices: list[tuple[str, str]] = []
    for op in MiningOperationType:
        label = op.name.replace("_", " ").title()
        choices.append((op.value, label))
    return choices
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=List[ProjectRead])
def list_projects(
    _: User = Depends(require_any_role(*READ_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> List[ProjectRead]:
    """List every project, visible to any read-capable role."""
    repo = _require_project_repo(uow)
    return [_to_read_model(p) for p in repo.list()]
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=ProjectRead, status_code=status.HTTP_201_CREATED)
def create_project(
    payload: ProjectCreate,
    _: User = Depends(require_roles(*MANAGE_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
    metadata: PricingMetadata = Depends(get_pricing_metadata),
) -> ProjectRead:
    """Create a project and attach the default pricing settings.

    Raises:
        HTTPException: 409 when the repository reports a conflict.
    """
    project = Project(**payload.model_dump())
    try:
        created = _require_project_repo(uow).create(project)
    except EntityConflictError as exc:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT, detail=str(exc)
        ) from exc

    # Every new project starts from the shared default pricing settings.
    defaults = uow.ensure_default_pricing_settings(metadata=metadata).settings
    uow.set_project_pricing_settings(created, defaults)
    return _to_read_model(created)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/ui",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="projects.project_list_page",
)
def project_list_page(
    request: Request,
    _: User = Depends(require_any_role_html(*READ_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> HTMLResponse:
    """Render the HTML project list, annotated with scenario counts."""
    projects = _require_project_repo(uow).list(with_children=True)
    for project in projects:
        # Expose a precomputed count for the template to display.
        setattr(project, "scenario_count", len(project.scenarios))
    context = {
        "projects": projects,
    }
    return templates.TemplateResponse(request, "projects/list.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/create",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="projects.create_project_form",
)
def create_project_form(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
) -> HTMLResponse:
    """Render an empty project creation form."""
    context = {
        "project": None,
        "operation_types": _operation_type_choices(),
        "form_action": request.url_for("projects.create_project_submit"),
        "cancel_url": request.url_for("projects.project_list_page"),
    }
    return templates.TemplateResponse(request, "projects/form.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/create",
    include_in_schema=False,
    name="projects.create_project_submit",
)
def create_project_submit(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
    name: str = Form(...),
    location: str | None = Form(None),
    operation_type: str = Form(...),
    description: str | None = Form(None),
    uow: UnitOfWork = Depends(get_unit_of_work),
    metadata: PricingMetadata = Depends(get_pricing_metadata),
):
    """Handle the HTML project-creation form.

    Validates the operation type, creates the project, attaches default
    pricing settings, and redirects to the list page. Validation and
    conflict errors re-render the form with an ``error`` message
    (400 and 409 respectively).
    """
    # Trim free-text form fields; empty strings become None.
    def _normalise(value: str | None) -> str | None:
        if value is None:
            return None
        value = value.strip()
        return value or None

    try:
        op_type = MiningOperationType(operation_type)
    except ValueError:
        # Unknown enum value: re-render the form with a 400.
        return templates.TemplateResponse(
            request,
            "projects/form.html",
            {
                "project": None,
                "operation_types": _operation_type_choices(),
                "form_action": request.url_for("projects.create_project_submit"),
                "cancel_url": request.url_for("projects.project_list_page"),
                "error": "Invalid operation type.",
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    project = Project(
        name=name.strip(),
        location=_normalise(location),
        operation_type=op_type,
        description=_normalise(description),
    )
    try:
        created = _require_project_repo(uow).create(project)
    except EntityConflictError:
        # Duplicate name: re-render the form with a 409.
        return templates.TemplateResponse(
            request,
            "projects/form.html",
            {
                "project": project,
                "operation_types": _operation_type_choices(),
                "form_action": request.url_for("projects.create_project_submit"),
                "cancel_url": request.url_for("projects.project_list_page"),
                "error": "Project with this name already exists.",
            },
            status_code=status.HTTP_409_CONFLICT,
        )

    # New projects inherit the shared default pricing settings.
    default_settings = uow.ensure_default_pricing_settings(
        metadata=metadata).settings
    uow.set_project_pricing_settings(created, default_settings)

    return RedirectResponse(
        request.url_for("projects.project_list_page"),
        status_code=status.HTTP_303_SEE_OTHER,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{project_id}", response_model=ProjectRead)
def get_project(project: Project = Depends(require_project_resource())) -> ProjectRead:
    """Return one project; lookup and 404 handling live in the dependency."""
    read_model = _to_read_model(project)
    return read_model
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{project_id}", response_model=ProjectRead)
def update_project(
    payload: ProjectUpdate,
    project: Project = Depends(
        require_project_resource(require_manage=True)
    ),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> ProjectRead:
    """Apply a partial update; only explicitly-sent fields are written."""
    changes = payload.model_dump(exclude_unset=True)
    for attr_name, attr_value in changes.items():
        setattr(project, attr_name, attr_value)

    uow.flush()
    return _to_read_model(project)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_project(
    project: Project = Depends(require_project_resource(require_manage=True)),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> None:
    """Delete a project; authorisation is enforced by the dependency."""
    repo = _require_project_repo(uow)
    repo.delete(project.id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/{project_id}/view",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="projects.view_project",
)
def view_project(
    request: Request,
    _: User = Depends(require_any_role_html(*READ_ROLES)),
    project: Project = Depends(require_project_resource_html()),
    uow: UnitOfWork = Depends(get_unit_of_work),
) -> HTMLResponse:
    """Render the project detail page with per-status scenario statistics."""
    # Re-fetch with children loaded; the dependency-resolved project may not
    # have its scenarios populated — TODO confirm against the repository.
    project = _require_project_repo(uow).get(project.id, with_children=True)

    scenarios = sorted(project.scenarios, key=lambda s: s.created_at)
    scenario_stats = {
        "total": len(scenarios),
        "active": sum(1 for scenario in scenarios if scenario.status == ScenarioStatus.ACTIVE),
        "draft": sum(1 for scenario in scenarios if scenario.status == ScenarioStatus.DRAFT),
        "archived": sum(1 for scenario in scenarios if scenario.status == ScenarioStatus.ARCHIVED),
        # Most recent scenario update, or None when none have been updated.
        "latest_update": max(
            (scenario.updated_at for scenario in scenarios if scenario.updated_at),
            default=None,
        ),
    }
    return templates.TemplateResponse(
        request,
        "projects/detail.html",
        {
            "project": project,
            "scenarios": scenarios,
            "scenario_stats": scenario_stats,
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/{project_id}/edit",
    response_class=HTMLResponse,
    include_in_schema=False,
    name="projects.edit_project_form",
)
def edit_project_form(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
    project: Project = Depends(
        require_project_resource_html(require_manage=True)
    ),
) -> HTMLResponse:
    """Render the edit form pre-populated with the project's data."""
    context = {
        "project": project,
        "operation_types": _operation_type_choices(),
        "form_action": request.url_for(
            "projects.edit_project_submit", project_id=project.id
        ),
        "cancel_url": request.url_for(
            "projects.view_project", project_id=project.id
        ),
    }
    return templates.TemplateResponse(request, "projects/form.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/{project_id}/edit",
    include_in_schema=False,
    name="projects.edit_project_submit",
)
def edit_project_submit(
    request: Request,
    _: User = Depends(require_roles_html(*MANAGE_ROLES)),
    project: Project = Depends(
        require_project_resource_html(require_manage=True)
    ),
    name: str = Form(...),
    location: str | None = Form(None),
    operation_type: str | None = Form(None),
    description: str | None = Form(None),
    uow: UnitOfWork = Depends(get_unit_of_work),
):
    """Handle the HTML project-edit form.

    Validates the submitted operation type, applies the changes, flushes,
    and redirects to the detail view. An invalid operation type re-renders
    the form with a 400.

    Bug fix: the original assigned ``project.name``/``project.location``
    BEFORE validating ``operation_type``, so an invalid submission left
    partial mutations on the session-managed ORM instance that a later
    flush could persist. Validation now happens before any mutation.
    """
    # Trim free-text form fields; empty strings become None.
    def _normalise(value: str | None) -> str | None:
        if value is None:
            return None
        value = value.strip()
        return value or None

    # Validate first so an error path leaves the ORM object untouched.
    op_type: MiningOperationType | None = None
    if operation_type:
        try:
            op_type = MiningOperationType(operation_type)
        except ValueError:
            return templates.TemplateResponse(
                request,
                "projects/form.html",
                {
                    "project": project,
                    "operation_types": _operation_type_choices(),
                    "form_action": request.url_for(
                        "projects.edit_project_submit", project_id=project.id
                    ),
                    "cancel_url": request.url_for(
                        "projects.view_project", project_id=project.id
                    ),
                    "error": "Invalid operation type.",
                },
                status_code=status.HTTP_400_BAD_REQUEST,
            )

    project.name = name.strip()
    project.location = _normalise(location)
    if op_type is not None:
        project.operation_type = op_type
    project.description = _normalise(description)

    uow.flush()

    return RedirectResponse(
        request.url_for("projects.view_project", project_id=project.id),
        status_code=status.HTTP_303_SEE_OTHER,
    )
|
||||||
@@ -1,73 +0,0 @@
|
|||||||
from typing import Any, Dict, List, cast
|
|
||||||
|
|
||||||
from fastapi import APIRouter, HTTPException, Request, status
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
from services.reporting import generate_report
|
|
||||||
|
|
||||||
|
|
||||||
# Summary-statistics reporting endpoints, mounted under /api/reporting.
router = APIRouter(prefix="/api/reporting", tags=["Reporting"])
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_payload(payload: Any) -> List[Dict[str, float]]:
|
|
||||||
if not isinstance(payload, list):
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail="Invalid input format",
|
|
||||||
)
|
|
||||||
|
|
||||||
typed_payload = cast(List[Any], payload)
|
|
||||||
|
|
||||||
validated: List[Dict[str, float]] = []
|
|
||||||
for index, item in enumerate(typed_payload):
|
|
||||||
if not isinstance(item, dict):
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail=f"Entry at index {index} must be an object",
|
|
||||||
)
|
|
||||||
value = cast(Dict[str, Any], item).get("result")
|
|
||||||
if not isinstance(value, (int, float)):
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail=f"Entry at index {index} must include numeric 'result'",
|
|
||||||
)
|
|
||||||
validated.append({"result": float(value)})
|
|
||||||
return validated
|
|
||||||
|
|
||||||
|
|
||||||
class ReportSummary(BaseModel):
    """Aggregate statistics over a list of simulation results.

    All values are populated from the mapping returned by
    ``services.reporting.generate_report``.
    """

    count: int  # number of result entries summarised
    mean: float
    median: float
    min: float
    max: float
    std_dev: float
    variance: float
    percentile_10: float
    percentile_90: float
    percentile_5: float
    percentile_95: float
    value_at_risk_95: float
    expected_shortfall_95: float
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/summary", response_model=ReportSummary)
async def summary_report(request: Request):
    """Validate the posted results list and return aggregate statistics."""
    raw = await request.json()
    entries = _validate_payload(raw)
    stats = generate_report(entries)
    # Coerce each mapping value to the schema's concrete field type.
    return ReportSummary(
        count=int(stats["count"]),
        mean=float(stats["mean"]),
        median=float(stats["median"]),
        min=float(stats["min"]),
        max=float(stats["max"]),
        std_dev=float(stats["std_dev"]),
        variance=float(stats["variance"]),
        percentile_10=float(stats["percentile_10"]),
        percentile_90=float(stats["percentile_90"]),
        percentile_5=float(stats["percentile_5"]),
        percentile_95=float(stats["percentile_95"]),
        value_at_risk_95=float(stats["value_at_risk_95"]),
        expected_shortfall_95=float(stats["expected_shortfall_95"]),
    )
|
|
||||||
434
routes/reports.py
Normal file
434
routes/reports.py
Normal file
@@ -0,0 +1,434 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
|
||||||
|
from fastapi.encoders import jsonable_encoder
|
||||||
|
from fastapi.responses import HTMLResponse
|
||||||
|
|
||||||
|
from dependencies import (
|
||||||
|
get_unit_of_work,
|
||||||
|
require_any_role,
|
||||||
|
require_any_role_html,
|
||||||
|
require_project_resource,
|
||||||
|
require_scenario_resource,
|
||||||
|
require_project_resource_html,
|
||||||
|
require_scenario_resource_html,
|
||||||
|
)
|
||||||
|
from models import Project, Scenario, User
|
||||||
|
from services.exceptions import EntityNotFoundError, ScenarioValidationError
|
||||||
|
from services.reporting import (
|
||||||
|
DEFAULT_ITERATIONS,
|
||||||
|
IncludeOptions,
|
||||||
|
ReportFilters,
|
||||||
|
ReportingService,
|
||||||
|
parse_include_tokens,
|
||||||
|
validate_percentiles,
|
||||||
|
)
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
|
# JSON and HTML reporting endpoints, mounted under /reports.
router = APIRouter(prefix="/reports", tags=["Reports"])
templates = create_templates()

# Role sets mirroring routes/projects.py.
READ_ROLES = ("viewer", "analyst", "project_manager", "admin")
MANAGE_ROLES = ("project_manager", "admin")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/projects/{project_id}", name="reports.project_summary")
def project_summary_report(
    project: Project = Depends(require_project_resource()),
    _: User = Depends(require_any_role(*READ_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
    include: str | None = Query(
        None,
        description="Comma-separated include tokens (distribution,samples,all).",
    ),
    scenario_ids: list[int] | None = Query(
        None,
        alias="scenario_ids",
        description="Repeatable scenario identifier filter.",
    ),
    start_date: date | None = Query(
        None,
        description="Filter scenarios starting on or after this date.",
    ),
    end_date: date | None = Query(
        None,
        description="Filter scenarios ending on or before this date.",
    ),
    fmt: str = Query(
        "json",
        alias="format",
        description="Response format (json only for this endpoint).",
    ),
    iterations: int | None = Query(
        None,
        gt=0,
        description="Override Monte Carlo iteration count when distribution is included.",
    ),
    percentiles: list[float] | None = Query(
        None,
        gt=0,
        description="Percentiles (0-100) for Monte Carlo summaries when included.",
    ),
) -> dict[str, object]:
    """Return the JSON summary report for one project.

    Rejects non-JSON formats (406) and invalid percentiles (422); the
    actual report assembly is delegated to ``ReportingService``.
    """
    if fmt.lower() != "json":
        raise HTTPException(
            status_code=status.HTTP_406_NOT_ACCEPTABLE,
            detail="Only JSON responses are supported; use the HTML endpoint for templates.",
        )

    include_options = parse_include_tokens(include)
    try:
        percentile_values = validate_percentiles(percentiles)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc

    # Optional scenario-id/date filters narrow the report's scope.
    scenario_filter = ReportFilters(
        scenario_ids=set(scenario_ids) if scenario_ids else None,
        start_date=start_date,
        end_date=end_date,
    )

    service = ReportingService(uow)
    report = service.project_summary(
        project,
        filters=scenario_filter,
        include=include_options,
        iterations=iterations or DEFAULT_ITERATIONS,
        percentiles=percentile_values,
    )
    # jsonable_encoder converts the report (dates, nested objects) into
    # JSON-serialisable primitives.
    return jsonable_encoder(report)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/projects/{project_id}/scenarios/compare",
    name="reports.project_scenario_comparison",
)
def project_scenario_comparison_report(
    project: Project = Depends(require_project_resource()),
    _: User = Depends(require_any_role(*READ_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
    scenario_ids: list[int] = Query(
        ..., alias="scenario_ids", description="Repeatable scenario identifier."),
    include: str | None = Query(
        None,
        description="Comma-separated include tokens (distribution,samples,all).",
    ),
    fmt: str = Query(
        "json",
        alias="format",
        description="Response format (json only for this endpoint).",
    ),
    iterations: int | None = Query(
        None,
        gt=0,
        description="Override Monte Carlo iteration count when distribution is included.",
    ),
    percentiles: list[float] | None = Query(
        None,
        description="Percentiles (0-100) for Monte Carlo summaries when included.",
    ),
) -> dict[str, object]:
    """Compare at least two scenarios belonging to one project (JSON only).

    Error paths: 422 for fewer than two unique ids, invalid percentiles,
    or scenario-validation failures; 406 for non-JSON formats; 404 for
    missing scenarios or scenarios not belonging to this project.
    """
    # dict.fromkeys de-duplicates while preserving submission order.
    unique_ids = list(dict.fromkeys(scenario_ids))
    if len(unique_ids) < 2:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail="At least two unique scenario_ids must be provided for comparison.",
        )
    if fmt.lower() != "json":
        raise HTTPException(
            status_code=status.HTTP_406_NOT_ACCEPTABLE,
            detail="Only JSON responses are supported; use the HTML endpoint for templates.",
        )

    include_options = parse_include_tokens(include)
    try:
        percentile_values = validate_percentiles(percentiles)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc

    try:
        scenarios = uow.validate_scenarios_for_comparison(unique_ids)
    except ScenarioValidationError as exc:
        # Structured detail so clients can map the failure to scenarios.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail={
                "code": exc.code,
                "message": exc.message,
                "scenario_ids": list(exc.scenario_ids or []),
            },
        ) from exc
    except EntityNotFoundError as exc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(exc),
        ) from exc

    # Cross-project ids are rejected as 404 rather than leaking existence.
    if any(scenario.project_id != project.id for scenario in scenarios):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="One or more scenarios are not associated with this project.",
        )

    service = ReportingService(uow)
    report = service.scenario_comparison(
        project,
        scenarios,
        include=include_options,
        iterations=iterations or DEFAULT_ITERATIONS,
        percentiles=percentile_values,
    )
    return jsonable_encoder(report)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/scenarios/{scenario_id}/distribution",
    name="reports.scenario_distribution",
)
def scenario_distribution_report(
    scenario: Scenario = Depends(require_scenario_resource()),
    _: User = Depends(require_any_role(*READ_ROLES)),
    uow: UnitOfWork = Depends(get_unit_of_work),
    include: str | None = Query(
        None,
        description="Comma-separated include tokens (samples,all).",
    ),
    fmt: str = Query(
        "json",
        alias="format",
        description="Response format (json only for this endpoint).",
    ),
    iterations: int | None = Query(
        None,
        gt=0,
        description="Override Monte Carlo iteration count (default applies otherwise).",
    ),
    percentiles: list[float] | None = Query(
        None,
        description="Percentiles (0-100) for Monte Carlo summaries.",
    ),
) -> dict[str, object]:
    """Return the Monte Carlo distribution report for one scenario (JSON).

    The distribution is always included on this endpoint; the ``include``
    parameter only controls whether raw samples are attached.
    """
    if fmt.lower() != "json":
        raise HTTPException(
            status_code=status.HTTP_406_NOT_ACCEPTABLE,
            detail="Only JSON responses are supported; use the HTML endpoint for templates.",
        )

    # Force distribution=True; honour only the caller's samples flag.
    requested = parse_include_tokens(include)
    include_options = IncludeOptions(
        distribution=True, samples=requested.samples)

    try:
        percentile_values = validate_percentiles(percentiles)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
            detail=str(exc),
        ) from exc

    service = ReportingService(uow)
    report = service.scenario_distribution(
        scenario,
        include=include_options,
        iterations=iterations or DEFAULT_ITERATIONS,
        percentiles=percentile_values,
    )
    return jsonable_encoder(report)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/projects/{project_id}/ui",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="reports.project_summary_page",
|
||||||
|
)
|
||||||
|
def project_summary_page(
|
||||||
|
request: Request,
|
||||||
|
project: Project = Depends(require_project_resource_html()),
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
include: str | None = Query(
|
||||||
|
None,
|
||||||
|
description="Comma-separated include tokens (distribution,samples,all).",
|
||||||
|
),
|
||||||
|
scenario_ids: list[int] | None = Query(
|
||||||
|
None,
|
||||||
|
alias="scenario_ids",
|
||||||
|
description="Repeatable scenario identifier filter.",
|
||||||
|
),
|
||||||
|
start_date: date | None = Query(
|
||||||
|
None,
|
||||||
|
description="Filter scenarios starting on or after this date.",
|
||||||
|
),
|
||||||
|
end_date: date | None = Query(
|
||||||
|
None,
|
||||||
|
description="Filter scenarios ending on or before this date.",
|
||||||
|
),
|
||||||
|
iterations: int | None = Query(
|
||||||
|
None,
|
||||||
|
gt=0,
|
||||||
|
description="Override Monte Carlo iteration count when distribution is included.",
|
||||||
|
),
|
||||||
|
percentiles: list[float] | None = Query(
|
||||||
|
None,
|
||||||
|
description="Percentiles (0-100) for Monte Carlo summaries when included.",
|
||||||
|
),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
include_options = parse_include_tokens(include)
|
||||||
|
try:
|
||||||
|
percentile_values = validate_percentiles(percentiles)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
scenario_filter = ReportFilters(
|
||||||
|
scenario_ids=set(scenario_ids) if scenario_ids else None,
|
||||||
|
start_date=start_date,
|
||||||
|
end_date=end_date,
|
||||||
|
)
|
||||||
|
|
||||||
|
service = ReportingService(uow)
|
||||||
|
context = service.build_project_summary_context(
|
||||||
|
project, scenario_filter, include_options, iterations or DEFAULT_ITERATIONS, percentile_values, request
|
||||||
|
)
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"reports/project_summary.html",
|
||||||
|
context,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/projects/{project_id}/scenarios/compare/ui",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="reports.project_scenario_comparison_page",
|
||||||
|
)
|
||||||
|
def project_scenario_comparison_page(
|
||||||
|
request: Request,
|
||||||
|
project: Project = Depends(require_project_resource_html()),
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
scenario_ids: list[int] = Query(
|
||||||
|
..., alias="scenario_ids", description="Repeatable scenario identifier."),
|
||||||
|
include: str | None = Query(
|
||||||
|
None,
|
||||||
|
description="Comma-separated include tokens (distribution,samples,all).",
|
||||||
|
),
|
||||||
|
iterations: int | None = Query(
|
||||||
|
None,
|
||||||
|
gt=0,
|
||||||
|
description="Override Monte Carlo iteration count when distribution is included.",
|
||||||
|
),
|
||||||
|
percentiles: list[float] | None = Query(
|
||||||
|
None,
|
||||||
|
description="Percentiles (0-100) for Monte Carlo summaries when included.",
|
||||||
|
),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
unique_ids = list(dict.fromkeys(scenario_ids))
|
||||||
|
if len(unique_ids) < 2:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail="At least two unique scenario_ids must be provided for comparison.",
|
||||||
|
)
|
||||||
|
|
||||||
|
include_options = parse_include_tokens(include)
|
||||||
|
try:
|
||||||
|
percentile_values = validate_percentiles(percentiles)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
scenarios = uow.validate_scenarios_for_comparison(unique_ids)
|
||||||
|
except ScenarioValidationError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail={
|
||||||
|
"code": exc.code,
|
||||||
|
"message": exc.message,
|
||||||
|
"scenario_ids": list(exc.scenario_ids or []),
|
||||||
|
},
|
||||||
|
) from exc
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
if any(scenario.project_id != project.id for scenario in scenarios):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail="One or more scenarios are not associated with this project.",
|
||||||
|
)
|
||||||
|
|
||||||
|
service = ReportingService(uow)
|
||||||
|
context = service.build_scenario_comparison_context(
|
||||||
|
project, scenarios, include_options, iterations or DEFAULT_ITERATIONS, percentile_values, request
|
||||||
|
)
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"reports/scenario_comparison.html",
|
||||||
|
context,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/scenarios/{scenario_id}/distribution/ui",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="reports.scenario_distribution_page",
|
||||||
|
)
|
||||||
|
def scenario_distribution_page(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource_html()
|
||||||
|
),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
include: str | None = Query(
|
||||||
|
None,
|
||||||
|
description="Comma-separated include tokens (samples,all).",
|
||||||
|
),
|
||||||
|
iterations: int | None = Query(
|
||||||
|
None,
|
||||||
|
gt=0,
|
||||||
|
description="Override Monte Carlo iteration count (default applies otherwise).",
|
||||||
|
),
|
||||||
|
percentiles: list[float] | None = Query(
|
||||||
|
None,
|
||||||
|
description="Percentiles (0-100) for Monte Carlo summaries.",
|
||||||
|
),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
requested = parse_include_tokens(include)
|
||||||
|
include_options = IncludeOptions(
|
||||||
|
distribution=True, samples=requested.samples)
|
||||||
|
|
||||||
|
try:
|
||||||
|
percentile_values = validate_percentiles(percentiles)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
service = ReportingService(uow)
|
||||||
|
context = service.build_scenario_distribution_context(
|
||||||
|
scenario, include_options, iterations or DEFAULT_ITERATIONS, percentile_values, request
|
||||||
|
)
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"reports/scenario_distribution.html",
|
||||||
|
context,
|
||||||
|
)
|
||||||
@@ -1,42 +1,656 @@
|
|||||||
from datetime import datetime
|
from __future__ import annotations
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
from datetime import date
|
||||||
from pydantic import BaseModel, ConfigDict
|
from types import SimpleNamespace
|
||||||
from sqlalchemy.orm import Session
|
from typing import List
|
||||||
|
|
||||||
from models.scenario import Scenario
|
from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
|
||||||
from routes.dependencies import get_db
|
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/scenarios", tags=["scenarios"])
|
from dependencies import (
|
||||||
|
get_pricing_metadata,
|
||||||
|
get_unit_of_work,
|
||||||
|
require_any_role,
|
||||||
|
require_any_role_html,
|
||||||
|
require_roles,
|
||||||
|
require_roles_html,
|
||||||
|
require_scenario_resource,
|
||||||
|
require_scenario_resource_html,
|
||||||
|
)
|
||||||
|
from models import ResourceType, Scenario, ScenarioStatus, User
|
||||||
|
from schemas.scenario import (
|
||||||
|
ScenarioComparisonRequest,
|
||||||
|
ScenarioComparisonResponse,
|
||||||
|
ScenarioCreate,
|
||||||
|
ScenarioRead,
|
||||||
|
ScenarioUpdate,
|
||||||
|
)
|
||||||
|
from services.currency import CurrencyValidationError, normalise_currency
|
||||||
|
from services.exceptions import (
|
||||||
|
EntityConflictError,
|
||||||
|
EntityNotFoundError,
|
||||||
|
ScenarioValidationError,
|
||||||
|
)
|
||||||
|
from services.pricing import PricingMetadata
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
from routes.template_filters import create_templates
|
||||||
|
|
||||||
# Pydantic schemas
|
router = APIRouter(tags=["Scenarios"])
|
||||||
|
templates = create_templates()
|
||||||
|
|
||||||
|
READ_ROLES = ("viewer", "analyst", "project_manager", "admin")
|
||||||
|
MANAGE_ROLES = ("project_manager", "admin")
|
||||||
|
|
||||||
|
|
||||||
class ScenarioCreate(BaseModel):
|
def _to_read_model(scenario: Scenario) -> ScenarioRead:
|
||||||
name: str
|
return ScenarioRead.model_validate(scenario)
|
||||||
description: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class ScenarioRead(ScenarioCreate):
|
def _resource_type_choices() -> list[tuple[str, str]]:
|
||||||
id: int
|
return [
|
||||||
created_at: datetime
|
(resource.value, resource.value.replace("_", " ").title())
|
||||||
updated_at: Optional[datetime] = None
|
for resource in ResourceType
|
||||||
model_config = ConfigDict(from_attributes=True)
|
]
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ScenarioRead)
|
def _scenario_status_choices() -> list[tuple[str, str]]:
|
||||||
def create_scenario(scenario: ScenarioCreate, db: Session = Depends(get_db)):
|
return [
|
||||||
db_s = db.query(Scenario).filter(Scenario.name == scenario.name).first()
|
(status.value, status.value.title()) for status in ScenarioStatus
|
||||||
if db_s:
|
]
|
||||||
raise HTTPException(status_code=400, detail="Scenario already exists")
|
|
||||||
new_s = Scenario(name=scenario.name, description=scenario.description)
|
|
||||||
db.add(new_s)
|
|
||||||
db.commit()
|
|
||||||
db.refresh(new_s)
|
|
||||||
return new_s
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=list[ScenarioRead])
|
def _require_project_repo(uow: UnitOfWork):
|
||||||
def list_scenarios(db: Session = Depends(get_db)):
|
if not uow.projects:
|
||||||
return db.query(Scenario).all()
|
raise RuntimeError("Project repository not initialised")
|
||||||
|
return uow.projects
|
||||||
|
|
||||||
|
|
||||||
|
def _require_scenario_repo(uow: UnitOfWork):
|
||||||
|
if not uow.scenarios:
|
||||||
|
raise RuntimeError("Scenario repository not initialised")
|
||||||
|
return uow.scenarios
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/projects/{project_id}/scenarios",
|
||||||
|
response_model=List[ScenarioRead],
|
||||||
|
)
|
||||||
|
def list_scenarios_for_project(
|
||||||
|
project_id: int,
|
||||||
|
_: User = Depends(require_any_role(*READ_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> List[ScenarioRead]:
|
||||||
|
project_repo = _require_project_repo(uow)
|
||||||
|
scenario_repo = _require_scenario_repo(uow)
|
||||||
|
try:
|
||||||
|
project_repo.get(project_id)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
|
||||||
|
|
||||||
|
scenarios = scenario_repo.list_for_project(project_id)
|
||||||
|
return [_to_read_model(scenario) for scenario in scenarios]
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
|
||||||
|
"/projects/{project_id}/scenarios/compare",
|
||||||
|
response_model=ScenarioComparisonResponse,
|
||||||
|
status_code=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
def compare_scenarios(
|
||||||
|
project_id: int,
|
||||||
|
payload: ScenarioComparisonRequest,
|
||||||
|
_: User = Depends(require_any_role(*READ_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> ScenarioComparisonResponse:
|
||||||
|
try:
|
||||||
|
_require_project_repo(uow).get(project_id)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
scenarios = uow.validate_scenarios_for_comparison(payload.scenario_ids)
|
||||||
|
if any(scenario.project_id != project_id for scenario in scenarios):
|
||||||
|
raise ScenarioValidationError(
|
||||||
|
code="SCENARIO_PROJECT_MISMATCH",
|
||||||
|
message="Selected scenarios do not belong to the same project.",
|
||||||
|
scenario_ids=[
|
||||||
|
scenario.id for scenario in scenarios if scenario.id is not None
|
||||||
|
],
|
||||||
|
)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||||
|
) from exc
|
||||||
|
except ScenarioValidationError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail={
|
||||||
|
"code": exc.code,
|
||||||
|
"message": exc.message,
|
||||||
|
"scenario_ids": list(exc.scenario_ids or []),
|
||||||
|
},
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return ScenarioComparisonResponse(
|
||||||
|
project_id=project_id,
|
||||||
|
scenarios=[_to_read_model(scenario) for scenario in scenarios],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
|
||||||
|
"/projects/{project_id}/scenarios",
|
||||||
|
response_model=ScenarioRead,
|
||||||
|
status_code=status.HTTP_201_CREATED,
|
||||||
|
)
|
||||||
|
def create_scenario_for_project(
|
||||||
|
project_id: int,
|
||||||
|
payload: ScenarioCreate,
|
||||||
|
_: User = Depends(require_roles(*MANAGE_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
) -> ScenarioRead:
|
||||||
|
project_repo = _require_project_repo(uow)
|
||||||
|
scenario_repo = _require_scenario_repo(uow)
|
||||||
|
try:
|
||||||
|
project_repo.get(project_id)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
|
||||||
|
|
||||||
|
scenario_data = payload.model_dump()
|
||||||
|
if not scenario_data.get("currency") and metadata.default_currency:
|
||||||
|
scenario_data["currency"] = metadata.default_currency
|
||||||
|
scenario = Scenario(project_id=project_id, **scenario_data)
|
||||||
|
|
||||||
|
try:
|
||||||
|
created = scenario_repo.create(scenario)
|
||||||
|
except EntityConflictError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
|
||||||
|
return _to_read_model(created)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/projects/{project_id}/scenarios/ui",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.project_scenario_list",
|
||||||
|
)
|
||||||
|
def project_scenario_list_page(
|
||||||
|
project_id: int,
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
try:
|
||||||
|
project = _require_project_repo(uow).get(
|
||||||
|
project_id, with_children=True)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
scenarios = sorted(
|
||||||
|
project.scenarios,
|
||||||
|
key=lambda scenario: scenario.updated_at or scenario.created_at,
|
||||||
|
reverse=True,
|
||||||
|
)
|
||||||
|
scenario_totals = {
|
||||||
|
"total": len(scenarios),
|
||||||
|
"active": sum(
|
||||||
|
1 for scenario in scenarios if scenario.status == ScenarioStatus.ACTIVE
|
||||||
|
),
|
||||||
|
"draft": sum(
|
||||||
|
1 for scenario in scenarios if scenario.status == ScenarioStatus.DRAFT
|
||||||
|
),
|
||||||
|
"archived": sum(
|
||||||
|
1 for scenario in scenarios if scenario.status == ScenarioStatus.ARCHIVED
|
||||||
|
),
|
||||||
|
"latest_update": max(
|
||||||
|
(
|
||||||
|
scenario.updated_at or scenario.created_at
|
||||||
|
for scenario in scenarios
|
||||||
|
if scenario.updated_at or scenario.created_at
|
||||||
|
),
|
||||||
|
default=None,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/list.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenarios": scenarios,
|
||||||
|
"scenario_totals": scenario_totals,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/scenarios/{scenario_id}", response_model=ScenarioRead)
|
||||||
|
def get_scenario(
|
||||||
|
scenario: Scenario = Depends(require_scenario_resource()),
|
||||||
|
) -> ScenarioRead:
|
||||||
|
return _to_read_model(scenario)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/scenarios/{scenario_id}", response_model=ScenarioRead)
|
||||||
|
def update_scenario(
|
||||||
|
payload: ScenarioUpdate,
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource(require_manage=True)
|
||||||
|
),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> ScenarioRead:
|
||||||
|
update_data = payload.model_dump(exclude_unset=True)
|
||||||
|
for field, value in update_data.items():
|
||||||
|
setattr(scenario, field, value)
|
||||||
|
|
||||||
|
uow.flush()
|
||||||
|
return _to_read_model(scenario)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/scenarios/{scenario_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
def delete_scenario(
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource(require_manage=True)
|
||||||
|
),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> None:
|
||||||
|
_require_scenario_repo(uow).delete(scenario.id)
|
||||||
|
|
||||||
|
|
||||||
|
def _normalise(value: str | None) -> str | None:
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
value = value.strip()
|
||||||
|
return value or None
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_date(value: str | None) -> date | None:
|
||||||
|
value = _normalise(value)
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
return date.fromisoformat(value)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_discount_rate(value: str | None) -> float | None:
|
||||||
|
value = _normalise(value)
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
return float(value)
|
||||||
|
except ValueError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _scenario_form_state(
|
||||||
|
*,
|
||||||
|
project_id: int,
|
||||||
|
name: str,
|
||||||
|
description: str | None,
|
||||||
|
status: ScenarioStatus,
|
||||||
|
start_date: date | None,
|
||||||
|
end_date: date | None,
|
||||||
|
discount_rate: float | None,
|
||||||
|
currency: str | None,
|
||||||
|
primary_resource: ResourceType | None,
|
||||||
|
scenario_id: int | None = None,
|
||||||
|
) -> SimpleNamespace:
|
||||||
|
return SimpleNamespace(
|
||||||
|
id=scenario_id,
|
||||||
|
project_id=project_id,
|
||||||
|
name=name,
|
||||||
|
description=description,
|
||||||
|
status=status,
|
||||||
|
start_date=start_date,
|
||||||
|
end_date=end_date,
|
||||||
|
discount_rate=discount_rate,
|
||||||
|
currency=currency,
|
||||||
|
primary_resource=primary_resource,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/projects/{project_id}/scenarios/new",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.create_scenario_form",
|
||||||
|
)
|
||||||
|
def create_scenario_form(
|
||||||
|
project_id: int,
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_roles_html(*MANAGE_ROLES)),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
try:
|
||||||
|
project = _require_project_repo(uow).get(project_id)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/form.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": None,
|
||||||
|
"scenario_statuses": _scenario_status_choices(),
|
||||||
|
"resource_types": _resource_type_choices(),
|
||||||
|
"form_action": request.url_for(
|
||||||
|
"scenarios.create_scenario_submit", project_id=project_id
|
||||||
|
),
|
||||||
|
"cancel_url": request.url_for(
|
||||||
|
"projects.view_project", project_id=project_id
|
||||||
|
),
|
||||||
|
"default_currency": metadata.default_currency,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
|
||||||
|
"/projects/{project_id}/scenarios/new",
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.create_scenario_submit",
|
||||||
|
)
|
||||||
|
def create_scenario_submit(
|
||||||
|
project_id: int,
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_roles_html(*MANAGE_ROLES)),
|
||||||
|
name: str = Form(...),
|
||||||
|
description: str | None = Form(None),
|
||||||
|
status_value: str = Form(ScenarioStatus.DRAFT.value),
|
||||||
|
start_date: str | None = Form(None),
|
||||||
|
end_date: str | None = Form(None),
|
||||||
|
discount_rate: str | None = Form(None),
|
||||||
|
currency: str | None = Form(None),
|
||||||
|
primary_resource: str | None = Form(None),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
):
|
||||||
|
project_repo = _require_project_repo(uow)
|
||||||
|
scenario_repo = _require_scenario_repo(uow)
|
||||||
|
try:
|
||||||
|
project = project_repo.get(project_id)
|
||||||
|
except EntityNotFoundError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
status_enum = ScenarioStatus(status_value)
|
||||||
|
except ValueError:
|
||||||
|
status_enum = ScenarioStatus.DRAFT
|
||||||
|
|
||||||
|
resource_enum = None
|
||||||
|
if primary_resource:
|
||||||
|
try:
|
||||||
|
resource_enum = ResourceType(primary_resource)
|
||||||
|
except ValueError:
|
||||||
|
resource_enum = None
|
||||||
|
|
||||||
|
name_value = name.strip()
|
||||||
|
description_value = _normalise(description)
|
||||||
|
start_date_value = _parse_date(start_date)
|
||||||
|
end_date_value = _parse_date(end_date)
|
||||||
|
discount_rate_value = _parse_discount_rate(discount_rate)
|
||||||
|
currency_input = _normalise(currency)
|
||||||
|
effective_currency = currency_input or metadata.default_currency
|
||||||
|
|
||||||
|
try:
|
||||||
|
currency_value = (
|
||||||
|
normalise_currency(effective_currency)
|
||||||
|
if effective_currency else None
|
||||||
|
)
|
||||||
|
except CurrencyValidationError as exc:
|
||||||
|
form_state = _scenario_form_state(
|
||||||
|
project_id=project_id,
|
||||||
|
name=name_value,
|
||||||
|
description=description_value,
|
||||||
|
status=status_enum,
|
||||||
|
start_date=start_date_value,
|
||||||
|
end_date=end_date_value,
|
||||||
|
discount_rate=discount_rate_value,
|
||||||
|
currency=currency_input or metadata.default_currency,
|
||||||
|
primary_resource=resource_enum,
|
||||||
|
)
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/form.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": form_state,
|
||||||
|
"scenario_statuses": _scenario_status_choices(),
|
||||||
|
"resource_types": _resource_type_choices(),
|
||||||
|
"form_action": request.url_for(
|
||||||
|
"scenarios.create_scenario_submit", project_id=project_id
|
||||||
|
),
|
||||||
|
"cancel_url": request.url_for(
|
||||||
|
"projects.view_project", project_id=project_id
|
||||||
|
),
|
||||||
|
"error": str(exc),
|
||||||
|
"error_field": "currency",
|
||||||
|
"default_currency": metadata.default_currency,
|
||||||
|
},
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
scenario = Scenario(
|
||||||
|
project_id=project_id,
|
||||||
|
name=name_value,
|
||||||
|
description=description_value,
|
||||||
|
status=status_enum,
|
||||||
|
start_date=start_date_value,
|
||||||
|
end_date=end_date_value,
|
||||||
|
discount_rate=discount_rate_value,
|
||||||
|
currency=currency_value,
|
||||||
|
primary_resource=resource_enum,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
scenario_repo.create(scenario)
|
||||||
|
except EntityConflictError:
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/form.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": scenario,
|
||||||
|
"scenario_statuses": _scenario_status_choices(),
|
||||||
|
"resource_types": _resource_type_choices(),
|
||||||
|
"form_action": request.url_for(
|
||||||
|
"scenarios.create_scenario_submit", project_id=project_id
|
||||||
|
),
|
||||||
|
"cancel_url": request.url_for(
|
||||||
|
"projects.view_project", project_id=project_id
|
||||||
|
),
|
||||||
|
"error": "Scenario with this name already exists for this project.",
|
||||||
|
"error_field": "name",
|
||||||
|
"default_currency": metadata.default_currency,
|
||||||
|
},
|
||||||
|
status_code=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
|
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("projects.view_project", project_id=project_id),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/scenarios/{scenario_id}/view",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.view_scenario",
|
||||||
|
)
|
||||||
|
def view_scenario(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource_html(with_children=True)
|
||||||
|
),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
project = _require_project_repo(uow).get(scenario.project_id)
|
||||||
|
financial_inputs = sorted(
|
||||||
|
scenario.financial_inputs, key=lambda item: item.created_at
|
||||||
|
)
|
||||||
|
simulation_parameters = sorted(
|
||||||
|
scenario.simulation_parameters, key=lambda item: item.created_at
|
||||||
|
)
|
||||||
|
|
||||||
|
scenario_metrics = {
|
||||||
|
"financial_count": len(financial_inputs),
|
||||||
|
"parameter_count": len(simulation_parameters),
|
||||||
|
"currency": scenario.currency,
|
||||||
|
"primary_resource": scenario.primary_resource.value.replace('_', ' ').title() if scenario.primary_resource else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/detail.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": scenario,
|
||||||
|
"scenario_metrics": scenario_metrics,
|
||||||
|
"financial_inputs": financial_inputs,
|
||||||
|
"simulation_parameters": simulation_parameters,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/scenarios/{scenario_id}/edit",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.edit_scenario_form",
|
||||||
|
)
|
||||||
|
def edit_scenario_form(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_roles_html(*MANAGE_ROLES)),
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource_html(require_manage=True)
|
||||||
|
),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
project = _require_project_repo(uow).get(scenario.project_id)
|
||||||
|
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/form.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": scenario,
|
||||||
|
"scenario_statuses": _scenario_status_choices(),
|
||||||
|
"resource_types": _resource_type_choices(),
|
||||||
|
"form_action": request.url_for(
|
||||||
|
"scenarios.edit_scenario_submit", scenario_id=scenario.id
|
||||||
|
),
|
||||||
|
"cancel_url": request.url_for(
|
||||||
|
"scenarios.view_scenario", scenario_id=scenario.id
|
||||||
|
),
|
||||||
|
"default_currency": metadata.default_currency,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
|
||||||
|
"/scenarios/{scenario_id}/edit",
|
||||||
|
include_in_schema=False,
|
||||||
|
name="scenarios.edit_scenario_submit",
|
||||||
|
)
|
||||||
|
def edit_scenario_submit(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_roles_html(*MANAGE_ROLES)),
|
||||||
|
scenario: Scenario = Depends(
|
||||||
|
require_scenario_resource_html(require_manage=True)
|
||||||
|
),
|
||||||
|
name: str = Form(...),
|
||||||
|
description: str | None = Form(None),
|
||||||
|
status_value: str = Form(ScenarioStatus.DRAFT.value),
|
||||||
|
start_date: str | None = Form(None),
|
||||||
|
end_date: str | None = Form(None),
|
||||||
|
discount_rate: str | None = Form(None),
|
||||||
|
currency: str | None = Form(None),
|
||||||
|
primary_resource: str | None = Form(None),
|
||||||
|
uow: UnitOfWork = Depends(get_unit_of_work),
|
||||||
|
metadata: PricingMetadata = Depends(get_pricing_metadata),
|
||||||
|
):
|
||||||
|
project = _require_project_repo(uow).get(scenario.project_id)
|
||||||
|
|
||||||
|
name_value = name.strip()
|
||||||
|
description_value = _normalise(description)
|
||||||
|
try:
|
||||||
|
scenario.status = ScenarioStatus(status_value)
|
||||||
|
except ValueError:
|
||||||
|
scenario.status = ScenarioStatus.DRAFT
|
||||||
|
status_enum = scenario.status
|
||||||
|
|
||||||
|
resource_enum = None
|
||||||
|
if primary_resource:
|
||||||
|
try:
|
||||||
|
resource_enum = ResourceType(primary_resource)
|
||||||
|
except ValueError:
|
||||||
|
resource_enum = None
|
||||||
|
|
||||||
|
start_date_value = _parse_date(start_date)
|
||||||
|
end_date_value = _parse_date(end_date)
|
||||||
|
discount_rate_value = _parse_discount_rate(discount_rate)
|
||||||
|
currency_input = _normalise(currency)
|
||||||
|
|
||||||
|
try:
|
||||||
|
currency_value = normalise_currency(currency_input)
|
||||||
|
except CurrencyValidationError as exc:
|
||||||
|
form_state = _scenario_form_state(
|
||||||
|
scenario_id=scenario.id,
|
||||||
|
project_id=scenario.project_id,
|
||||||
|
name=name_value,
|
||||||
|
description=description_value,
|
||||||
|
status=status_enum,
|
||||||
|
start_date=start_date_value,
|
||||||
|
end_date=end_date_value,
|
||||||
|
discount_rate=discount_rate_value,
|
||||||
|
currency=currency_input,
|
||||||
|
primary_resource=resource_enum,
|
||||||
|
)
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"scenarios/form.html",
|
||||||
|
{
|
||||||
|
"project": project,
|
||||||
|
"scenario": form_state,
|
||||||
|
"scenario_statuses": _scenario_status_choices(),
|
||||||
|
"resource_types": _resource_type_choices(),
|
||||||
|
"form_action": request.url_for(
|
||||||
|
"scenarios.edit_scenario_submit", scenario_id=scenario.id
|
||||||
|
),
|
||||||
|
"cancel_url": request.url_for(
|
||||||
|
"scenarios.view_scenario", scenario_id=scenario.id
|
||||||
|
),
|
||||||
|
"error": str(exc),
|
||||||
|
"error_field": "currency",
|
||||||
|
"default_currency": metadata.default_currency,
|
||||||
|
},
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
scenario.name = name_value
|
||||||
|
scenario.description = description_value
|
||||||
|
scenario.start_date = start_date_value
|
||||||
|
scenario.end_date = end_date_value
|
||||||
|
scenario.discount_rate = discount_rate_value
|
||||||
|
scenario.currency = currency_value
|
||||||
|
scenario.primary_resource = resource_enum
|
||||||
|
|
||||||
|
uow.flush()
|
||||||
|
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("scenarios.view_scenario", scenario_id=scenario.id),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,110 +0,0 @@
|
|||||||
from typing import Dict, List
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from pydantic import BaseModel, Field, model_validator
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
from services.settings import (
|
|
||||||
CSS_COLOR_DEFAULTS,
|
|
||||||
get_css_color_settings,
|
|
||||||
list_css_env_override_rows,
|
|
||||||
read_css_color_env_overrides,
|
|
||||||
update_css_color_settings,
|
|
||||||
get_theme_settings,
|
|
||||||
save_theme_settings,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/settings", tags=["Settings"])
|
|
||||||
|
|
||||||
|
|
||||||
class CSSSettingsPayload(BaseModel):
|
|
||||||
variables: Dict[str, str] = Field(default_factory=dict)
|
|
||||||
|
|
||||||
@model_validator(mode="after")
|
|
||||||
def _validate_allowed_keys(self) -> "CSSSettingsPayload":
|
|
||||||
invalid = set(self.variables.keys()) - set(CSS_COLOR_DEFAULTS.keys())
|
|
||||||
if invalid:
|
|
||||||
invalid_keys = ", ".join(sorted(invalid))
|
|
||||||
raise ValueError(
|
|
||||||
f"Unsupported CSS variables: {invalid_keys}."
|
|
||||||
" Accepted keys align with the default theme variables."
|
|
||||||
)
|
|
||||||
return self
|
|
||||||
|
|
||||||
|
|
||||||
class EnvOverride(BaseModel):
|
|
||||||
css_key: str
|
|
||||||
env_var: str
|
|
||||||
value: str
|
|
||||||
|
|
||||||
|
|
||||||
class CSSSettingsResponse(BaseModel):
|
|
||||||
variables: Dict[str, str]
|
|
||||||
env_overrides: Dict[str, str] = Field(default_factory=dict)
|
|
||||||
env_sources: List[EnvOverride] = Field(default_factory=list)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/css", response_model=CSSSettingsResponse)
|
|
||||||
def read_css_settings(db: Session = Depends(get_db)) -> CSSSettingsResponse:
|
|
||||||
try:
|
|
||||||
values = get_css_color_settings(db)
|
|
||||||
env_overrides = read_css_color_env_overrides()
|
|
||||||
env_sources = [
|
|
||||||
EnvOverride(**row) for row in list_css_env_override_rows()
|
|
||||||
]
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
||||||
detail=str(exc),
|
|
||||||
) from exc
|
|
||||||
return CSSSettingsResponse(
|
|
||||||
variables=values,
|
|
||||||
env_overrides=env_overrides,
|
|
||||||
env_sources=env_sources,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put(
|
|
||||||
"/css", response_model=CSSSettingsResponse, status_code=status.HTTP_200_OK
|
|
||||||
)
|
|
||||||
def update_css_settings(
|
|
||||||
payload: CSSSettingsPayload, db: Session = Depends(get_db)
|
|
||||||
) -> CSSSettingsResponse:
|
|
||||||
try:
|
|
||||||
values = update_css_color_settings(db, payload.variables)
|
|
||||||
env_overrides = read_css_color_env_overrides()
|
|
||||||
env_sources = [
|
|
||||||
EnvOverride(**row) for row in list_css_env_override_rows()
|
|
||||||
]
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
|
||||||
detail=str(exc),
|
|
||||||
) from exc
|
|
||||||
return CSSSettingsResponse(
|
|
||||||
variables=values,
|
|
||||||
env_overrides=env_overrides,
|
|
||||||
env_sources=env_sources,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ThemeSettings(BaseModel):
|
|
||||||
theme_name: str
|
|
||||||
primary_color: str
|
|
||||||
secondary_color: str
|
|
||||||
accent_color: str
|
|
||||||
background_color: str
|
|
||||||
text_color: str
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/theme")
|
|
||||||
async def update_theme(theme_data: ThemeSettings, db: Session = Depends(get_db)):
|
|
||||||
data_dict = theme_data.model_dump()
|
|
||||||
save_theme_settings(db, data_dict)
|
|
||||||
return {"message": "Theme updated", "theme": data_dict}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/theme")
|
|
||||||
async def get_theme(db: Session = Depends(get_db)):
|
|
||||||
return get_theme_settings(db)
|
|
||||||
@@ -1,126 +0,0 @@
|
|||||||
from typing import Dict, List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from pydantic import BaseModel, PositiveInt
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.parameters import Parameter
|
|
||||||
from models.scenario import Scenario
|
|
||||||
from models.simulation_result import SimulationResult
|
|
||||||
from routes.dependencies import get_db
|
|
||||||
from services.reporting import generate_report
|
|
||||||
from services.simulation import run_simulation
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/simulations", tags=["Simulations"])
|
|
||||||
|
|
||||||
|
|
||||||
class SimulationParameterInput(BaseModel):
|
|
||||||
name: str
|
|
||||||
value: float
|
|
||||||
distribution: Optional[str] = "normal"
|
|
||||||
std_dev: Optional[float] = None
|
|
||||||
min: Optional[float] = None
|
|
||||||
max: Optional[float] = None
|
|
||||||
mode: Optional[float] = None
|
|
||||||
|
|
||||||
|
|
||||||
class SimulationRunRequest(BaseModel):
|
|
||||||
scenario_id: int
|
|
||||||
iterations: PositiveInt = 1000
|
|
||||||
parameters: Optional[List[SimulationParameterInput]] = None
|
|
||||||
seed: Optional[int] = None
|
|
||||||
|
|
||||||
|
|
||||||
class SimulationResultItem(BaseModel):
|
|
||||||
iteration: int
|
|
||||||
result: float
|
|
||||||
|
|
||||||
|
|
||||||
class SimulationRunResponse(BaseModel):
|
|
||||||
scenario_id: int
|
|
||||||
iterations: int
|
|
||||||
results: List[SimulationResultItem]
|
|
||||||
summary: Dict[str, float | int]
|
|
||||||
|
|
||||||
|
|
||||||
def _load_parameters(
|
|
||||||
db: Session, scenario_id: int
|
|
||||||
) -> List[SimulationParameterInput]:
|
|
||||||
db_params = (
|
|
||||||
db.query(Parameter)
|
|
||||||
.filter(Parameter.scenario_id == scenario_id)
|
|
||||||
.order_by(Parameter.id)
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
return [
|
|
||||||
SimulationParameterInput(
|
|
||||||
name=item.name,
|
|
||||||
value=item.value,
|
|
||||||
)
|
|
||||||
for item in db_params
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/run", response_model=SimulationRunResponse)
|
|
||||||
async def simulate(
|
|
||||||
payload: SimulationRunRequest, db: Session = Depends(get_db)
|
|
||||||
):
|
|
||||||
scenario = (
|
|
||||||
db.query(Scenario).filter(Scenario.id == payload.scenario_id).first()
|
|
||||||
)
|
|
||||||
if scenario is None:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_404_NOT_FOUND,
|
|
||||||
detail="Scenario not found",
|
|
||||||
)
|
|
||||||
|
|
||||||
parameters = payload.parameters or _load_parameters(db, payload.scenario_id)
|
|
||||||
if not parameters:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail="No parameters provided",
|
|
||||||
)
|
|
||||||
|
|
||||||
raw_results = run_simulation(
|
|
||||||
[param.model_dump(exclude_none=True) for param in parameters],
|
|
||||||
iterations=payload.iterations,
|
|
||||||
seed=payload.seed,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not raw_results:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail="Simulation produced no results",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Persist results (replace existing values for scenario)
|
|
||||||
db.query(SimulationResult).filter(
|
|
||||||
SimulationResult.scenario_id == payload.scenario_id
|
|
||||||
).delete()
|
|
||||||
db.bulk_save_objects(
|
|
||||||
[
|
|
||||||
SimulationResult(
|
|
||||||
scenario_id=payload.scenario_id,
|
|
||||||
iteration=item["iteration"],
|
|
||||||
result=item["result"],
|
|
||||||
)
|
|
||||||
for item in raw_results
|
|
||||||
]
|
|
||||||
)
|
|
||||||
db.commit()
|
|
||||||
|
|
||||||
summary = generate_report(raw_results)
|
|
||||||
|
|
||||||
response = SimulationRunResponse(
|
|
||||||
scenario_id=payload.scenario_id,
|
|
||||||
iterations=payload.iterations,
|
|
||||||
results=[
|
|
||||||
SimulationResultItem(
|
|
||||||
iteration=int(item["iteration"]),
|
|
||||||
result=float(item["result"]),
|
|
||||||
)
|
|
||||||
for item in raw_results
|
|
||||||
],
|
|
||||||
summary=summary,
|
|
||||||
)
|
|
||||||
return response
|
|
||||||
147
routes/template_filters.py
Normal file
147
routes/template_filters.py
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from fastapi import Request
|
||||||
|
from fastapi.templating import Jinja2Templates
|
||||||
|
|
||||||
|
from services.navigation import NavigationService
|
||||||
|
from services.session import AuthSession
|
||||||
|
from services.unit_of_work import UnitOfWork
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def format_datetime(value: Any) -> str:
|
||||||
|
"""Render datetime values consistently for templates."""
|
||||||
|
if not isinstance(value, datetime):
|
||||||
|
return ""
|
||||||
|
if value.tzinfo is None:
|
||||||
|
value = value.replace(tzinfo=timezone.utc)
|
||||||
|
return value.strftime("%Y-%m-%d %H:%M UTC")
|
||||||
|
|
||||||
|
|
||||||
|
def currency_display(value: Any, currency_code: str | None) -> str:
|
||||||
|
"""Format numeric values with currency context."""
|
||||||
|
if value is None:
|
||||||
|
return "—"
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
formatted_value = f"{value:,.2f}"
|
||||||
|
else:
|
||||||
|
formatted_value = str(value)
|
||||||
|
if currency_code:
|
||||||
|
return f"{currency_code} {formatted_value}"
|
||||||
|
return formatted_value
|
||||||
|
|
||||||
|
|
||||||
|
def format_metric(value: Any, metric_name: str, currency_code: str | None = None) -> str:
|
||||||
|
"""Format metrics according to their semantic type."""
|
||||||
|
if value is None:
|
||||||
|
return "—"
|
||||||
|
|
||||||
|
currency_metrics = {
|
||||||
|
"npv",
|
||||||
|
"inflows",
|
||||||
|
"outflows",
|
||||||
|
"net",
|
||||||
|
"total_inflows",
|
||||||
|
"total_outflows",
|
||||||
|
"total_net",
|
||||||
|
}
|
||||||
|
if metric_name in currency_metrics and currency_code:
|
||||||
|
return currency_display(value, currency_code)
|
||||||
|
|
||||||
|
percentage_metrics = {"irr", "payback_period"}
|
||||||
|
if metric_name in percentage_metrics:
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
return f"{value:.2f}%"
|
||||||
|
return f"{value}%"
|
||||||
|
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
return f"{value:,.2f}"
|
||||||
|
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
|
||||||
|
def percentage_display(value: Any) -> str:
|
||||||
|
"""Format numeric values as percentages."""
|
||||||
|
if value is None:
|
||||||
|
return "—"
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
return f"{value:.2f}%"
|
||||||
|
return f"{value}%"
|
||||||
|
|
||||||
|
|
||||||
|
def period_display(value: Any) -> str:
|
||||||
|
"""Format period values in years."""
|
||||||
|
if value is None:
|
||||||
|
return "—"
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
if value == int(value):
|
||||||
|
return f"{int(value)} years"
|
||||||
|
return f"{value:.1f} years"
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
|
||||||
|
def register_common_filters(templates: Jinja2Templates) -> None:
|
||||||
|
templates.env.filters["format_datetime"] = format_datetime
|
||||||
|
templates.env.filters["currency_display"] = currency_display
|
||||||
|
templates.env.filters["format_metric"] = format_metric
|
||||||
|
templates.env.filters["percentage_display"] = percentage_display
|
||||||
|
templates.env.filters["period_display"] = period_display
|
||||||
|
|
||||||
|
|
||||||
|
def _sidebar_navigation_for_request(request: Request | None):
|
||||||
|
if request is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
cached = getattr(request.state, "_navigation_sidebar_dto", None)
|
||||||
|
if cached is not None:
|
||||||
|
return cached
|
||||||
|
|
||||||
|
session_context = getattr(request.state, "auth_session", None)
|
||||||
|
if isinstance(session_context, AuthSession):
|
||||||
|
session = session_context
|
||||||
|
else:
|
||||||
|
session = AuthSession.anonymous()
|
||||||
|
|
||||||
|
try:
|
||||||
|
with UnitOfWork() as uow:
|
||||||
|
if not uow.navigation:
|
||||||
|
logger.debug("Navigation repository unavailable for sidebar rendering")
|
||||||
|
sidebar_dto = None
|
||||||
|
else:
|
||||||
|
service = NavigationService(uow.navigation)
|
||||||
|
sidebar_dto = service.build_sidebar(session=session, request=request)
|
||||||
|
except Exception: # pragma: no cover - defensive fallback for templates
|
||||||
|
logger.exception("Failed to build sidebar navigation during template render")
|
||||||
|
sidebar_dto = None
|
||||||
|
|
||||||
|
setattr(request.state, "_navigation_sidebar_dto", sidebar_dto)
|
||||||
|
return sidebar_dto
|
||||||
|
|
||||||
|
|
||||||
|
def register_navigation_globals(templates: Jinja2Templates) -> None:
|
||||||
|
templates.env.globals["get_sidebar_navigation"] = _sidebar_navigation_for_request
|
||||||
|
|
||||||
|
|
||||||
|
def create_templates() -> Jinja2Templates:
|
||||||
|
templates = Jinja2Templates(directory="templates")
|
||||||
|
register_common_filters(templates)
|
||||||
|
register_navigation_globals(templates)
|
||||||
|
return templates
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"format_datetime",
|
||||||
|
"currency_display",
|
||||||
|
"format_metric",
|
||||||
|
"percentage_display",
|
||||||
|
"period_display",
|
||||||
|
"register_common_filters",
|
||||||
|
"register_navigation_globals",
|
||||||
|
"create_templates",
|
||||||
|
]
|
||||||
851
routes/ui.py
851
routes/ui.py
@@ -1,784 +1,109 @@
|
|||||||
from collections import defaultdict
|
from __future__ import annotations
|
||||||
from datetime import datetime, timezone
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, Request
|
from fastapi import APIRouter, Depends, Request
|
||||||
from fastapi.responses import HTMLResponse, JSONResponse
|
from fastapi.responses import HTMLResponse
|
||||||
from fastapi.templating import Jinja2Templates
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from models.capex import Capex
|
from dependencies import require_any_role_html, require_roles_html
|
||||||
from models.consumption import Consumption
|
from models import User
|
||||||
from models.equipment import Equipment
|
from routes.template_filters import create_templates
|
||||||
from models.maintenance import Maintenance
|
|
||||||
from models.opex import Opex
|
router = APIRouter(tags=["UI"])
|
||||||
from models.parameters import Parameter
|
templates = create_templates()
|
||||||
from models.production_output import ProductionOutput
|
|
||||||
from models.scenario import Scenario
|
READ_ROLES = ("viewer", "analyst", "project_manager", "admin")
|
||||||
from models.simulation_result import SimulationResult
|
MANAGE_ROLES = ("project_manager", "admin")
|
||||||
from routes.dependencies import get_db
|
|
||||||
from services.reporting import generate_report
|
|
||||||
from models.currency import Currency
|
@router.get(
|
||||||
from routes.currencies import DEFAULT_CURRENCY_CODE, _ensure_default_currency
|
"/ui/simulations",
|
||||||
from services.settings import (
|
response_class=HTMLResponse,
|
||||||
CSS_COLOR_DEFAULTS,
|
include_in_schema=False,
|
||||||
get_css_color_settings,
|
name="ui.simulations",
|
||||||
list_css_env_override_rows,
|
|
||||||
read_css_color_env_overrides,
|
|
||||||
)
|
)
|
||||||
|
def simulations_dashboard(
|
||||||
|
|
||||||
CURRENCY_CHOICES: list[Dict[str, Any]] = [
|
|
||||||
{"id": "USD", "name": "US Dollar (USD)"},
|
|
||||||
{"id": "EUR", "name": "Euro (EUR)"},
|
|
||||||
{"id": "CLP", "name": "Chilean Peso (CLP)"},
|
|
||||||
{"id": "RMB", "name": "Chinese Yuan (RMB)"},
|
|
||||||
{"id": "GBP", "name": "British Pound (GBP)"},
|
|
||||||
{"id": "CAD", "name": "Canadian Dollar (CAD)"},
|
|
||||||
{"id": "AUD", "name": "Australian Dollar (AUD)"},
|
|
||||||
]
|
|
||||||
|
|
||||||
MEASUREMENT_UNITS: list[Dict[str, Any]] = [
|
|
||||||
{"id": "tonnes", "name": "Tonnes", "symbol": "t"},
|
|
||||||
{"id": "kilograms", "name": "Kilograms", "symbol": "kg"},
|
|
||||||
{"id": "pounds", "name": "Pounds", "symbol": "lb"},
|
|
||||||
{"id": "liters", "name": "Liters", "symbol": "L"},
|
|
||||||
{"id": "cubic_meters", "name": "Cubic Meters", "symbol": "m3"},
|
|
||||||
{"id": "kilowatt_hours", "name": "Kilowatt Hours", "symbol": "kWh"},
|
|
||||||
]
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
# Set up Jinja2 templates directory
|
|
||||||
templates = Jinja2Templates(directory="templates")
|
|
||||||
|
|
||||||
|
|
||||||
def _context(
|
|
||||||
request: Request, extra: Optional[Dict[str, Any]] = None
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
payload: Dict[str, Any] = {
|
|
||||||
"request": request,
|
|
||||||
"current_year": datetime.now(timezone.utc).year,
|
|
||||||
}
|
|
||||||
if extra:
|
|
||||||
payload.update(extra)
|
|
||||||
return payload
|
|
||||||
|
|
||||||
|
|
||||||
def _render(
|
|
||||||
request: Request,
|
request: Request,
|
||||||
template_name: str,
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
extra: Optional[Dict[str, Any]] = None,
|
) -> HTMLResponse:
|
||||||
):
|
return templates.TemplateResponse(
|
||||||
context = _context(request, extra)
|
request,
|
||||||
return templates.TemplateResponse(request, template_name, context)
|
"simulations.html",
|
||||||
|
|
||||||
|
|
||||||
def _format_currency(value: float) -> str:
|
|
||||||
return f"${value:,.2f}"
|
|
||||||
|
|
||||||
|
|
||||||
def _format_decimal(value: float) -> str:
|
|
||||||
return f"{value:,.2f}"
|
|
||||||
|
|
||||||
|
|
||||||
def _format_int(value: int) -> str:
|
|
||||||
return f"{value:,}"
|
|
||||||
|
|
||||||
|
|
||||||
def _load_scenarios(db: Session) -> Dict[str, Any]:
|
|
||||||
scenarios: list[Dict[str, Any]] = [
|
|
||||||
{
|
{
|
||||||
"id": item.id,
|
"title": "Simulations",
|
||||||
"name": item.name,
|
|
||||||
"description": item.description,
|
|
||||||
}
|
|
||||||
for item in db.query(Scenario).order_by(Scenario.name).all()
|
|
||||||
]
|
|
||||||
return {"scenarios": scenarios}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_parameters(db: Session) -> Dict[str, Any]:
|
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for param in db.query(Parameter).order_by(
|
|
||||||
Parameter.scenario_id, Parameter.id
|
|
||||||
):
|
|
||||||
grouped[param.scenario_id].append(
|
|
||||||
{
|
|
||||||
"id": param.id,
|
|
||||||
"name": param.name,
|
|
||||||
"value": param.value,
|
|
||||||
"distribution_type": param.distribution_type,
|
|
||||||
"distribution_parameters": param.distribution_parameters,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {"parameters_by_scenario": dict(grouped)}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_costs(db: Session) -> Dict[str, Any]:
|
|
||||||
capex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for capex in db.query(Capex).order_by(Capex.scenario_id, Capex.id).all():
|
|
||||||
capex_grouped[int(getattr(capex, "scenario_id"))].append(
|
|
||||||
{
|
|
||||||
"id": int(getattr(capex, "id")),
|
|
||||||
"scenario_id": int(getattr(capex, "scenario_id")),
|
|
||||||
"amount": float(getattr(capex, "amount", 0.0)),
|
|
||||||
"description": getattr(capex, "description", "") or "",
|
|
||||||
"currency_code": getattr(capex, "currency_code", "USD")
|
|
||||||
or "USD",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
opex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for opex in db.query(Opex).order_by(Opex.scenario_id, Opex.id).all():
|
|
||||||
opex_grouped[int(getattr(opex, "scenario_id"))].append(
|
|
||||||
{
|
|
||||||
"id": int(getattr(opex, "id")),
|
|
||||||
"scenario_id": int(getattr(opex, "scenario_id")),
|
|
||||||
"amount": float(getattr(opex, "amount", 0.0)),
|
|
||||||
"description": getattr(opex, "description", "") or "",
|
|
||||||
"currency_code": getattr(opex, "currency_code", "USD") or "USD",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"capex_by_scenario": dict(capex_grouped),
|
|
||||||
"opex_by_scenario": dict(opex_grouped),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_currencies(db: Session) -> Dict[str, Any]:
|
|
||||||
items: list[Dict[str, Any]] = []
|
|
||||||
for c in (
|
|
||||||
db.query(Currency)
|
|
||||||
.filter_by(is_active=True)
|
|
||||||
.order_by(Currency.code)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
items.append(
|
|
||||||
{"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol}
|
|
||||||
)
|
|
||||||
if not items:
|
|
||||||
items.append({"id": "USD", "name": "US Dollar (USD)", "symbol": "$"})
|
|
||||||
return {"currency_options": items}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_currency_settings(db: Session) -> Dict[str, Any]:
|
|
||||||
_ensure_default_currency(db)
|
|
||||||
records = db.query(Currency).order_by(Currency.code).all()
|
|
||||||
currencies: list[Dict[str, Any]] = []
|
|
||||||
for record in records:
|
|
||||||
code_value = getattr(record, "code")
|
|
||||||
currencies.append(
|
|
||||||
{
|
|
||||||
"id": int(getattr(record, "id")),
|
|
||||||
"code": code_value,
|
|
||||||
"name": getattr(record, "name"),
|
|
||||||
"symbol": getattr(record, "symbol"),
|
|
||||||
"is_active": bool(getattr(record, "is_active", True)),
|
|
||||||
"is_default": code_value == DEFAULT_CURRENCY_CODE,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
active_count = sum(1 for item in currencies if item["is_active"])
|
|
||||||
inactive_count = len(currencies) - active_count
|
|
||||||
|
|
||||||
return {
|
|
||||||
"currencies": currencies,
|
|
||||||
"currency_stats": {
|
|
||||||
"total": len(currencies),
|
|
||||||
"active": active_count,
|
|
||||||
"inactive": inactive_count,
|
|
||||||
},
|
},
|
||||||
"default_currency_code": DEFAULT_CURRENCY_CODE,
|
|
||||||
"currency_api_base": "/api/currencies",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_css_settings(db: Session) -> Dict[str, Any]:
|
|
||||||
variables = get_css_color_settings(db)
|
|
||||||
env_overrides = read_css_color_env_overrides()
|
|
||||||
env_rows = list_css_env_override_rows()
|
|
||||||
env_meta = {row["css_key"]: row for row in env_rows}
|
|
||||||
return {
|
|
||||||
"css_variables": variables,
|
|
||||||
"css_defaults": CSS_COLOR_DEFAULTS,
|
|
||||||
"css_env_overrides": env_overrides,
|
|
||||||
"css_env_override_rows": env_rows,
|
|
||||||
"css_env_override_meta": env_meta,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_consumption(db: Session) -> Dict[str, Any]:
|
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for record in (
|
|
||||||
db.query(Consumption)
|
|
||||||
.order_by(Consumption.scenario_id, Consumption.id)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
record_id = int(getattr(record, "id"))
|
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
|
||||||
amount_value = float(getattr(record, "amount", 0.0))
|
|
||||||
description = getattr(record, "description", "") or ""
|
|
||||||
unit_name = getattr(record, "unit_name", None)
|
|
||||||
unit_symbol = getattr(record, "unit_symbol", None)
|
|
||||||
grouped[scenario_id].append(
|
|
||||||
{
|
|
||||||
"id": record_id,
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"amount": amount_value,
|
|
||||||
"description": description,
|
|
||||||
"unit_name": unit_name,
|
|
||||||
"unit_symbol": unit_symbol,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {"consumption_by_scenario": dict(grouped)}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_production(db: Session) -> Dict[str, Any]:
|
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for record in (
|
|
||||||
db.query(ProductionOutput)
|
|
||||||
.order_by(ProductionOutput.scenario_id, ProductionOutput.id)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
record_id = int(getattr(record, "id"))
|
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
|
||||||
amount_value = float(getattr(record, "amount", 0.0))
|
|
||||||
description = getattr(record, "description", "") or ""
|
|
||||||
unit_name = getattr(record, "unit_name", None)
|
|
||||||
unit_symbol = getattr(record, "unit_symbol", None)
|
|
||||||
grouped[scenario_id].append(
|
|
||||||
{
|
|
||||||
"id": record_id,
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"amount": amount_value,
|
|
||||||
"description": description,
|
|
||||||
"unit_name": unit_name,
|
|
||||||
"unit_symbol": unit_symbol,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {"production_by_scenario": dict(grouped)}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_equipment(db: Session) -> Dict[str, Any]:
|
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for record in (
|
|
||||||
db.query(Equipment).order_by(Equipment.scenario_id, Equipment.id).all()
|
|
||||||
):
|
|
||||||
record_id = int(getattr(record, "id"))
|
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
|
||||||
name_value = getattr(record, "name", "") or ""
|
|
||||||
description = getattr(record, "description", "") or ""
|
|
||||||
grouped[scenario_id].append(
|
|
||||||
{
|
|
||||||
"id": record_id,
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"name": name_value,
|
|
||||||
"description": description,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {"equipment_by_scenario": dict(grouped)}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_maintenance(db: Session) -> Dict[str, Any]:
|
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for record in (
|
|
||||||
db.query(Maintenance)
|
|
||||||
.order_by(Maintenance.scenario_id, Maintenance.maintenance_date)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
record_id = int(getattr(record, "id"))
|
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
|
||||||
equipment_id = int(getattr(record, "equipment_id"))
|
|
||||||
equipment_obj = getattr(record, "equipment", None)
|
|
||||||
equipment_name = (
|
|
||||||
getattr(equipment_obj, "name", "") if equipment_obj else ""
|
|
||||||
)
|
|
||||||
maintenance_date = getattr(record, "maintenance_date", None)
|
|
||||||
cost_value = float(getattr(record, "cost", 0.0))
|
|
||||||
description = getattr(record, "description", "") or ""
|
|
||||||
|
|
||||||
grouped[scenario_id].append(
|
|
||||||
{
|
|
||||||
"id": record_id,
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"equipment_id": equipment_id,
|
|
||||||
"equipment_name": equipment_name,
|
|
||||||
"maintenance_date": (
|
|
||||||
maintenance_date.isoformat() if maintenance_date else ""
|
|
||||||
),
|
|
||||||
"cost": cost_value,
|
|
||||||
"description": description,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {"maintenance_by_scenario": dict(grouped)}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_simulations(db: Session) -> Dict[str, Any]:
|
|
||||||
scenarios: list[Dict[str, Any]] = [
|
|
||||||
{
|
|
||||||
"id": item.id,
|
|
||||||
"name": item.name,
|
|
||||||
}
|
|
||||||
for item in db.query(Scenario).order_by(Scenario.name).all()
|
|
||||||
]
|
|
||||||
|
|
||||||
results_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
|
||||||
for record in (
|
|
||||||
db.query(SimulationResult)
|
|
||||||
.order_by(SimulationResult.scenario_id, SimulationResult.iteration)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
|
||||||
results_grouped[scenario_id].append(
|
|
||||||
{
|
|
||||||
"iteration": int(getattr(record, "iteration")),
|
|
||||||
"result": float(getattr(record, "result", 0.0)),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
runs: list[Dict[str, Any]] = []
|
|
||||||
sample_limit = 20
|
|
||||||
for item in scenarios:
|
|
||||||
scenario_id = int(item["id"])
|
|
||||||
scenario_results = results_grouped.get(scenario_id, [])
|
|
||||||
summary = (
|
|
||||||
generate_report(scenario_results)
|
|
||||||
if scenario_results
|
|
||||||
else generate_report([])
|
|
||||||
)
|
|
||||||
runs.append(
|
|
||||||
{
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"scenario_name": item["name"],
|
|
||||||
"iterations": int(summary.get("count", 0)),
|
|
||||||
"summary": summary,
|
|
||||||
"sample_results": scenario_results[:sample_limit],
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"simulation_scenarios": scenarios,
|
|
||||||
"simulation_runs": runs,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_reporting(db: Session) -> Dict[str, Any]:
|
|
||||||
scenarios = _load_scenarios(db)["scenarios"]
|
|
||||||
runs = _load_simulations(db)["simulation_runs"]
|
|
||||||
|
|
||||||
summaries: list[Dict[str, Any]] = []
|
|
||||||
runs_by_scenario = {run["scenario_id"]: run for run in runs}
|
|
||||||
|
|
||||||
for scenario in scenarios:
|
|
||||||
scenario_id = scenario["id"]
|
|
||||||
run = runs_by_scenario.get(scenario_id)
|
|
||||||
summary = run["summary"] if run else generate_report([])
|
|
||||||
summaries.append(
|
|
||||||
{
|
|
||||||
"scenario_id": scenario_id,
|
|
||||||
"scenario_name": scenario["name"],
|
|
||||||
"summary": summary,
|
|
||||||
"iterations": run["iterations"] if run else 0,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"report_summaries": summaries,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|
||||||
scenarios = _load_scenarios(db)["scenarios"]
|
|
||||||
parameters_by_scenario = _load_parameters(db)["parameters_by_scenario"]
|
|
||||||
costs_context = _load_costs(db)
|
|
||||||
capex_by_scenario = costs_context["capex_by_scenario"]
|
|
||||||
opex_by_scenario = costs_context["opex_by_scenario"]
|
|
||||||
consumption_by_scenario = _load_consumption(db)["consumption_by_scenario"]
|
|
||||||
production_by_scenario = _load_production(db)["production_by_scenario"]
|
|
||||||
equipment_by_scenario = _load_equipment(db)["equipment_by_scenario"]
|
|
||||||
maintenance_by_scenario = _load_maintenance(db)["maintenance_by_scenario"]
|
|
||||||
simulation_context = _load_simulations(db)
|
|
||||||
simulation_runs = simulation_context["simulation_runs"]
|
|
||||||
|
|
||||||
runs_by_scenario = {run["scenario_id"]: run for run in simulation_runs}
|
|
||||||
|
|
||||||
def sum_amounts(
|
|
||||||
grouped: Dict[int, list[Dict[str, Any]]], field: str = "amount"
|
|
||||||
) -> float:
|
|
||||||
total = 0.0
|
|
||||||
for items in grouped.values():
|
|
||||||
for item in items:
|
|
||||||
value = item.get(field, 0.0)
|
|
||||||
if isinstance(value, (int, float)):
|
|
||||||
total += float(value)
|
|
||||||
return total
|
|
||||||
|
|
||||||
total_capex = sum_amounts(capex_by_scenario)
|
|
||||||
total_opex = sum_amounts(opex_by_scenario)
|
|
||||||
total_consumption = sum_amounts(consumption_by_scenario)
|
|
||||||
total_production = sum_amounts(production_by_scenario)
|
|
||||||
total_maintenance_cost = sum_amounts(maintenance_by_scenario, field="cost")
|
|
||||||
|
|
||||||
total_parameters = sum(
|
|
||||||
len(items) for items in parameters_by_scenario.values()
|
|
||||||
)
|
|
||||||
total_equipment = sum(
|
|
||||||
len(items) for items in equipment_by_scenario.values()
|
|
||||||
)
|
|
||||||
total_maintenance_events = sum(
|
|
||||||
len(items) for items in maintenance_by_scenario.values()
|
|
||||||
)
|
|
||||||
total_simulation_iterations = sum(
|
|
||||||
run["iterations"] for run in simulation_runs
|
|
||||||
)
|
)
|
||||||
|
|
||||||
scenario_rows: list[Dict[str, Any]] = []
|
|
||||||
scenario_labels: list[str] = []
|
|
||||||
scenario_capex: list[float] = []
|
|
||||||
scenario_opex: list[float] = []
|
|
||||||
activity_labels: list[str] = []
|
|
||||||
activity_production: list[float] = []
|
|
||||||
activity_consumption: list[float] = []
|
|
||||||
|
|
||||||
for scenario in scenarios:
|
@router.get(
|
||||||
scenario_id = scenario["id"]
|
"/ui/reporting",
|
||||||
scenario_name = scenario["name"]
|
response_class=HTMLResponse,
|
||||||
param_count = len(parameters_by_scenario.get(scenario_id, []))
|
include_in_schema=False,
|
||||||
equipment_count = len(equipment_by_scenario.get(scenario_id, []))
|
name="ui.reporting",
|
||||||
maintenance_count = len(maintenance_by_scenario.get(scenario_id, []))
|
)
|
||||||
|
def reporting_dashboard(
|
||||||
capex_total = sum(
|
request: Request,
|
||||||
float(item.get("amount", 0.0))
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
for item in capex_by_scenario.get(scenario_id, [])
|
) -> HTMLResponse:
|
||||||
)
|
return templates.TemplateResponse(
|
||||||
opex_total = sum(
|
request,
|
||||||
float(item.get("amount", 0.0))
|
"reporting.html",
|
||||||
for item in opex_by_scenario.get(scenario_id, [])
|
|
||||||
)
|
|
||||||
consumption_total = sum(
|
|
||||||
float(item.get("amount", 0.0))
|
|
||||||
for item in consumption_by_scenario.get(scenario_id, [])
|
|
||||||
)
|
|
||||||
production_total = sum(
|
|
||||||
float(item.get("amount", 0.0))
|
|
||||||
for item in production_by_scenario.get(scenario_id, [])
|
|
||||||
)
|
|
||||||
|
|
||||||
run = runs_by_scenario.get(scenario_id)
|
|
||||||
summary = run["summary"] if run else generate_report([])
|
|
||||||
iterations = run["iterations"] if run else 0
|
|
||||||
mean_value = float(summary.get("mean", 0.0))
|
|
||||||
|
|
||||||
scenario_rows.append(
|
|
||||||
{
|
|
||||||
"scenario_name": scenario_name,
|
|
||||||
"parameter_count": param_count,
|
|
||||||
"parameter_display": _format_int(param_count),
|
|
||||||
"equipment_count": equipment_count,
|
|
||||||
"equipment_display": _format_int(equipment_count),
|
|
||||||
"capex_total": capex_total,
|
|
||||||
"capex_display": _format_currency(capex_total),
|
|
||||||
"opex_total": opex_total,
|
|
||||||
"opex_display": _format_currency(opex_total),
|
|
||||||
"production_total": production_total,
|
|
||||||
"production_display": _format_decimal(production_total),
|
|
||||||
"consumption_total": consumption_total,
|
|
||||||
"consumption_display": _format_decimal(consumption_total),
|
|
||||||
"maintenance_count": maintenance_count,
|
|
||||||
"maintenance_display": _format_int(maintenance_count),
|
|
||||||
"iterations": iterations,
|
|
||||||
"iterations_display": _format_int(iterations),
|
|
||||||
"simulation_mean": mean_value,
|
|
||||||
"simulation_mean_display": _format_decimal(mean_value),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
scenario_labels.append(scenario_name)
|
|
||||||
scenario_capex.append(capex_total)
|
|
||||||
scenario_opex.append(opex_total)
|
|
||||||
|
|
||||||
activity_labels.append(scenario_name)
|
|
||||||
activity_production.append(production_total)
|
|
||||||
activity_consumption.append(consumption_total)
|
|
||||||
|
|
||||||
scenario_rows.sort(key=lambda row: row["scenario_name"].lower())
|
|
||||||
|
|
||||||
all_simulation_results = [
|
|
||||||
{"result": float(getattr(item, "result", 0.0))}
|
|
||||||
for item in db.query(SimulationResult).all()
|
|
||||||
]
|
|
||||||
overall_report = generate_report(all_simulation_results)
|
|
||||||
|
|
||||||
overall_report_metrics = [
|
|
||||||
{
|
{
|
||||||
"label": "Runs",
|
"title": "Reporting",
|
||||||
"value": _format_int(int(overall_report.get("count", 0))),
|
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"label": "Mean",
|
|
||||||
"value": _format_decimal(float(overall_report.get("mean", 0.0))),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Median",
|
|
||||||
"value": _format_decimal(float(overall_report.get("median", 0.0))),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Std Dev",
|
|
||||||
"value": _format_decimal(float(overall_report.get("std_dev", 0.0))),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "95th Percentile",
|
|
||||||
"value": _format_decimal(
|
|
||||||
float(overall_report.get("percentile_95", 0.0))
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "VaR (95%)",
|
|
||||||
"value": _format_decimal(
|
|
||||||
float(overall_report.get("value_at_risk_95", 0.0))
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Expected Shortfall (95%)",
|
|
||||||
"value": _format_decimal(
|
|
||||||
float(overall_report.get("expected_shortfall_95", 0.0))
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
recent_simulations: list[Dict[str, Any]] = [
|
|
||||||
{
|
|
||||||
"scenario_name": run["scenario_name"],
|
|
||||||
"iterations": run["iterations"],
|
|
||||||
"iterations_display": _format_int(run["iterations"]),
|
|
||||||
"mean_display": _format_decimal(
|
|
||||||
float(run["summary"].get("mean", 0.0))
|
|
||||||
),
|
|
||||||
"p95_display": _format_decimal(
|
|
||||||
float(run["summary"].get("percentile_95", 0.0))
|
|
||||||
),
|
|
||||||
}
|
|
||||||
for run in simulation_runs
|
|
||||||
if run["iterations"] > 0
|
|
||||||
]
|
|
||||||
recent_simulations.sort(key=lambda item: item["iterations"], reverse=True)
|
|
||||||
recent_simulations = recent_simulations[:5]
|
|
||||||
|
|
||||||
upcoming_maintenance: list[Dict[str, Any]] = []
|
|
||||||
for record in (
|
|
||||||
db.query(Maintenance)
|
|
||||||
.order_by(Maintenance.maintenance_date.asc())
|
|
||||||
.limit(5)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
maintenance_date = getattr(record, "maintenance_date", None)
|
|
||||||
upcoming_maintenance.append(
|
|
||||||
{
|
|
||||||
"scenario_name": getattr(
|
|
||||||
getattr(record, "scenario", None), "name", "Unknown"
|
|
||||||
),
|
|
||||||
"equipment_name": getattr(
|
|
||||||
getattr(record, "equipment", None), "name", "Unknown"
|
|
||||||
),
|
|
||||||
"date_display": (
|
|
||||||
maintenance_date.strftime("%Y-%m-%d")
|
|
||||||
if maintenance_date
|
|
||||||
else "—"
|
|
||||||
),
|
|
||||||
"cost_display": _format_currency(
|
|
||||||
float(getattr(record, "cost", 0.0))
|
|
||||||
),
|
|
||||||
"description": getattr(record, "description", "") or "—",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
cost_chart_has_data = any(value > 0 for value in scenario_capex) or any(
|
|
||||||
value > 0 for value in scenario_opex
|
|
||||||
)
|
)
|
||||||
activity_chart_has_data = any(
|
|
||||||
value > 0 for value in activity_production
|
|
||||||
) or any(value > 0 for value in activity_consumption)
|
|
||||||
|
|
||||||
scenario_cost_chart: Dict[str, list[Any]] = {
|
|
||||||
"labels": scenario_labels,
|
|
||||||
"capex": scenario_capex,
|
|
||||||
"opex": scenario_opex,
|
|
||||||
}
|
|
||||||
scenario_activity_chart: Dict[str, list[Any]] = {
|
|
||||||
"labels": activity_labels,
|
|
||||||
"production": activity_production,
|
|
||||||
"consumption": activity_consumption,
|
|
||||||
}
|
|
||||||
|
|
||||||
summary_metrics = [
|
@router.get(
|
||||||
{"label": "Active Scenarios", "value": _format_int(len(scenarios))},
|
"/ui/settings",
|
||||||
{"label": "Parameters", "value": _format_int(total_parameters)},
|
response_class=HTMLResponse,
|
||||||
{"label": "CAPEX Total", "value": _format_currency(total_capex)},
|
include_in_schema=False,
|
||||||
{"label": "OPEX Total", "value": _format_currency(total_opex)},
|
name="ui.settings",
|
||||||
{"label": "Equipment Assets", "value": _format_int(total_equipment)},
|
)
|
||||||
|
def settings_page(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"settings.html",
|
||||||
{
|
{
|
||||||
"label": "Maintenance Events",
|
"title": "Settings",
|
||||||
"value": _format_int(total_maintenance_events),
|
|
||||||
},
|
},
|
||||||
{"label": "Consumption", "value": _format_decimal(total_consumption)},
|
)
|
||||||
{"label": "Production", "value": _format_decimal(total_production)},
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/theme-settings",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="ui.theme_settings",
|
||||||
|
)
|
||||||
|
def theme_settings_page(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_any_role_html(*READ_ROLES)),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"theme_settings.html",
|
||||||
{
|
{
|
||||||
"label": "Simulation Iterations",
|
"title": "Theme Settings",
|
||||||
"value": _format_int(total_simulation_iterations),
|
|
||||||
},
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/ui/currencies",
|
||||||
|
response_class=HTMLResponse,
|
||||||
|
include_in_schema=False,
|
||||||
|
name="ui.currencies",
|
||||||
|
)
|
||||||
|
def currencies_page(
|
||||||
|
request: Request,
|
||||||
|
_: User = Depends(require_roles_html(*MANAGE_ROLES)),
|
||||||
|
) -> HTMLResponse:
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
request,
|
||||||
|
"currencies.html",
|
||||||
{
|
{
|
||||||
"label": "Maintenance Cost",
|
"title": "Currency Management",
|
||||||
"value": _format_currency(total_maintenance_cost),
|
|
||||||
},
|
},
|
||||||
]
|
)
|
||||||
|
|
||||||
return {
|
|
||||||
"summary_metrics": summary_metrics,
|
|
||||||
"scenario_rows": scenario_rows,
|
|
||||||
"overall_report_metrics": overall_report_metrics,
|
|
||||||
"recent_simulations": recent_simulations,
|
|
||||||
"upcoming_maintenance": upcoming_maintenance,
|
|
||||||
"scenario_cost_chart": scenario_cost_chart,
|
|
||||||
"scenario_activity_chart": scenario_activity_chart,
|
|
||||||
"cost_chart_has_data": cost_chart_has_data,
|
|
||||||
"activity_chart_has_data": activity_chart_has_data,
|
|
||||||
"report_available": overall_report.get("count", 0) > 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_class=HTMLResponse)
|
|
||||||
async def dashboard_root(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the primary dashboard landing page."""
|
|
||||||
return _render(request, "Dashboard.html", _load_dashboard(db))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/dashboard", response_class=HTMLResponse)
|
|
||||||
async def dashboard(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the legacy dashboard route for backward compatibility."""
|
|
||||||
return _render(request, "Dashboard.html", _load_dashboard(db))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/dashboard/data", response_class=JSONResponse)
|
|
||||||
async def dashboard_data(db: Session = Depends(get_db)) -> JSONResponse:
|
|
||||||
"""Expose dashboard aggregates as JSON for client-side refreshes."""
|
|
||||||
return JSONResponse(_load_dashboard(db))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/scenarios", response_class=HTMLResponse)
|
|
||||||
async def scenario_form(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the scenario creation form."""
|
|
||||||
context = _load_scenarios(db)
|
|
||||||
return _render(request, "ScenarioForm.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/parameters", response_class=HTMLResponse)
|
|
||||||
async def parameter_form(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the parameter input form."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_parameters(db))
|
|
||||||
return _render(request, "ParameterInput.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/costs", response_class=HTMLResponse)
|
|
||||||
async def costs_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the costs view with CAPEX and OPEX data."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_costs(db))
|
|
||||||
context.update(_load_currencies(db))
|
|
||||||
return _render(request, "costs.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/consumption", response_class=HTMLResponse)
|
|
||||||
async def consumption_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the consumption view with scenario consumption data."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_consumption(db))
|
|
||||||
context["unit_options"] = MEASUREMENT_UNITS
|
|
||||||
return _render(request, "consumption.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/production", response_class=HTMLResponse)
|
|
||||||
async def production_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the production view with scenario production data."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_production(db))
|
|
||||||
context["unit_options"] = MEASUREMENT_UNITS
|
|
||||||
return _render(request, "production.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/equipment", response_class=HTMLResponse)
|
|
||||||
async def equipment_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the equipment view with scenario equipment data."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_equipment(db))
|
|
||||||
return _render(request, "equipment.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/maintenance", response_class=HTMLResponse)
|
|
||||||
async def maintenance_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the maintenance view with scenario maintenance data."""
|
|
||||||
context: Dict[str, Any] = {}
|
|
||||||
context.update(_load_scenarios(db))
|
|
||||||
context.update(_load_equipment(db))
|
|
||||||
context.update(_load_maintenance(db))
|
|
||||||
return _render(request, "maintenance.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/simulations", response_class=HTMLResponse)
|
|
||||||
async def simulations_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the simulations view with scenario information and recent runs."""
|
|
||||||
return _render(request, "simulations.html", _load_simulations(db))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/reporting", response_class=HTMLResponse)
|
|
||||||
async def reporting_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the reporting view with scenario KPI summaries."""
|
|
||||||
return _render(request, "reporting.html", _load_reporting(db))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/settings", response_class=HTMLResponse)
|
|
||||||
async def settings_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the settings landing page."""
|
|
||||||
context = _load_css_settings(db)
|
|
||||||
return _render(request, "settings.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/currencies", response_class=HTMLResponse)
|
|
||||||
async def currencies_view(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the currency administration page with full currency context."""
|
|
||||||
context = _load_currency_settings(db)
|
|
||||||
return _render(request, "currencies.html", context)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/login", response_class=HTMLResponse)
|
|
||||||
async def login_page(request: Request):
|
|
||||||
return _render(request, "login.html")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/register", response_class=HTMLResponse)
|
|
||||||
async def register_page(request: Request):
|
|
||||||
return _render(request, "register.html")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/profile", response_class=HTMLResponse)
|
|
||||||
async def profile_page(request: Request):
|
|
||||||
return _render(request, "profile.html")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/forgot-password", response_class=HTMLResponse)
|
|
||||||
async def forgot_password_page(request: Request):
|
|
||||||
return _render(request, "forgot_password.html")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/theme-settings", response_class=HTMLResponse)
|
|
||||||
async def theme_settings_page(request: Request, db: Session = Depends(get_db)):
|
|
||||||
"""Render the theme settings page."""
|
|
||||||
context = _load_css_settings(db)
|
|
||||||
return _render(request, "theme_settings.html", context)
|
|
||||||
|
|||||||
107
routes/users.py
107
routes/users.py
@@ -1,107 +0,0 @@
|
|||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
|
|
||||||
from config.database import get_db
|
|
||||||
from models.user import User
|
|
||||||
from services.security import create_access_token, get_current_user
|
|
||||||
from schemas.user import (
|
|
||||||
PasswordReset,
|
|
||||||
PasswordResetRequest,
|
|
||||||
UserCreate,
|
|
||||||
UserInDB,
|
|
||||||
UserLogin,
|
|
||||||
UserUpdate,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/users", tags=["users"])
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/register", response_model=UserInDB, status_code=status.HTTP_201_CREATED)
|
|
||||||
async def register_user(user: UserCreate, db: Session = Depends(get_db)):
|
|
||||||
db_user = db.query(User).filter(User.username == user.username).first()
|
|
||||||
if db_user:
|
|
||||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
|
|
||||||
detail="Username already registered")
|
|
||||||
db_user = db.query(User).filter(User.email == user.email).first()
|
|
||||||
if db_user:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
|
|
||||||
|
|
||||||
# Get or create default role
|
|
||||||
from models.role import Role
|
|
||||||
default_role = db.query(Role).filter(Role.name == "user").first()
|
|
||||||
if not default_role:
|
|
||||||
default_role = Role(name="user")
|
|
||||||
db.add(default_role)
|
|
||||||
db.commit()
|
|
||||||
db.refresh(default_role)
|
|
||||||
|
|
||||||
new_user = User(username=user.username, email=user.email,
|
|
||||||
role_id=default_role.id)
|
|
||||||
new_user.set_password(user.password)
|
|
||||||
db.add(new_user)
|
|
||||||
db.commit()
|
|
||||||
db.refresh(new_user)
|
|
||||||
return new_user
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/login")
|
|
||||||
async def login_user(user: UserLogin, db: Session = Depends(get_db)):
|
|
||||||
db_user = db.query(User).filter(User.username == user.username).first()
|
|
||||||
if not db_user or not db_user.check_password(user.password):
|
|
||||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
|
|
||||||
detail="Incorrect username or password")
|
|
||||||
access_token = create_access_token(subject=db_user.username)
|
|
||||||
return {"access_token": access_token, "token_type": "bearer"}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/me")
|
|
||||||
async def read_users_me(current_user: User = Depends(get_current_user)):
|
|
||||||
return current_user
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/me", response_model=UserInDB)
|
|
||||||
async def update_user_me(user_update: UserUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
|
|
||||||
if user_update.username and user_update.username != current_user.username:
|
|
||||||
existing_user = db.query(User).filter(
|
|
||||||
User.username == user_update.username).first()
|
|
||||||
if existing_user:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST, detail="Username already taken")
|
|
||||||
setattr(current_user, "username", user_update.username)
|
|
||||||
|
|
||||||
if user_update.email and user_update.email != current_user.email:
|
|
||||||
existing_user = db.query(User).filter(
|
|
||||||
User.email == user_update.email).first()
|
|
||||||
if existing_user:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
|
|
||||||
setattr(current_user, "email", user_update.email)
|
|
||||||
|
|
||||||
if user_update.password:
|
|
||||||
current_user.set_password(user_update.password)
|
|
||||||
|
|
||||||
db.add(current_user)
|
|
||||||
db.commit()
|
|
||||||
db.refresh(current_user)
|
|
||||||
return current_user
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/forgot-password")
|
|
||||||
async def forgot_password(request: PasswordResetRequest):
|
|
||||||
# In a real application, this would send an email with a reset token
|
|
||||||
return {"message": "Password reset email sent (not really)"}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/reset-password")
|
|
||||||
async def reset_password(request: PasswordReset, db: Session = Depends(get_db)):
|
|
||||||
# In a real application, the token would be verified
|
|
||||||
user = db.query(User).filter(User.username ==
|
|
||||||
request.token).first() # Use token as username for test
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid token or user")
|
|
||||||
user.set_password(request.new_password)
|
|
||||||
db.add(user)
|
|
||||||
db.commit()
|
|
||||||
return {"message": "Password has been reset successfully"}
|
|
||||||
2
run_docker.ps1
Normal file
2
run_docker.ps1
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
docker run -d --name calminer-app --env-file .env -p 8003:8003 -v "${PWD}\logs:/app/logs" --restart unless-stopped calminer:latest
|
||||||
|
docker logs -f calminer-app
|
||||||
67
schemas/auth.py
Normal file
67
schemas/auth.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
|
||||||
|
|
||||||
|
|
||||||
|
class FormModel(BaseModel):
|
||||||
|
"""Base Pydantic model for HTML form submissions."""
|
||||||
|
|
||||||
|
model_config = ConfigDict(extra="forbid", str_strip_whitespace=True)
|
||||||
|
|
||||||
|
|
||||||
|
class RegistrationForm(FormModel):
|
||||||
|
username: str = Field(min_length=3, max_length=128)
|
||||||
|
email: str = Field(min_length=5, max_length=255)
|
||||||
|
password: str = Field(min_length=8, max_length=256)
|
||||||
|
confirm_password: str
|
||||||
|
|
||||||
|
@field_validator("email")
|
||||||
|
@classmethod
|
||||||
|
def validate_email(cls, value: str) -> str:
|
||||||
|
if "@" not in value or value.startswith("@") or value.endswith("@"):
|
||||||
|
raise ValueError("Invalid email address.")
|
||||||
|
local, domain = value.split("@", 1)
|
||||||
|
if not local or "." not in domain:
|
||||||
|
raise ValueError("Invalid email address.")
|
||||||
|
return value.lower()
|
||||||
|
|
||||||
|
@field_validator("confirm_password")
|
||||||
|
@classmethod
|
||||||
|
def passwords_match(cls, value: str, info: ValidationInfo) -> str:
|
||||||
|
password = info.data.get("password")
|
||||||
|
if password != value:
|
||||||
|
raise ValueError("Passwords do not match.")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class LoginForm(FormModel):
|
||||||
|
username: str = Field(min_length=1, max_length=255)
|
||||||
|
password: str = Field(min_length=1, max_length=256)
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordResetRequestForm(FormModel):
|
||||||
|
email: str = Field(min_length=5, max_length=255)
|
||||||
|
|
||||||
|
@field_validator("email")
|
||||||
|
@classmethod
|
||||||
|
def validate_email(cls, value: str) -> str:
|
||||||
|
if "@" not in value or value.startswith("@") or value.endswith("@"):
|
||||||
|
raise ValueError("Invalid email address.")
|
||||||
|
local, domain = value.split("@", 1)
|
||||||
|
if not local or "." not in domain:
|
||||||
|
raise ValueError("Invalid email address.")
|
||||||
|
return value.lower()
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordResetForm(FormModel):
|
||||||
|
token: str = Field(min_length=1)
|
||||||
|
password: str = Field(min_length=8, max_length=256)
|
||||||
|
confirm_password: str
|
||||||
|
|
||||||
|
@field_validator("confirm_password")
|
||||||
|
@classmethod
|
||||||
|
def reset_passwords_match(cls, value: str, info: ValidationInfo) -> str:
|
||||||
|
password = info.data.get("password")
|
||||||
|
if password != value:
|
||||||
|
raise ValueError("Passwords do not match.")
|
||||||
|
return value
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user