Compare commits
9 Commits
feat/datab
...
97b1c0360b
| Author | SHA1 | Date | |
|---|---|---|---|
| 97b1c0360b | |||
| e8a86b15e4 | |||
| 70db34d088 | |||
| 0550928a2f | |||
| ec56099e2a | |||
| c71908c8d9 | |||
| 75f533b87b | |||
| 5b1322ddbc | |||
| 713c9feebb |
111
.gitea/actions/setup-python-env/action.yml
Normal file
111
.gitea/actions/setup-python-env/action.yml
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
name: Setup Python Environment
|
||||||
|
description: Configure Python, proxies, dependencies, and optional database setup for CI jobs.
|
||||||
|
author: CalMiner Team
|
||||||
|
inputs:
|
||||||
|
python-version:
|
||||||
|
description: Python version to install.
|
||||||
|
required: false
|
||||||
|
default: "3.10"
|
||||||
|
install-playwright:
|
||||||
|
description: Install Playwright browsers when true.
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
install-requirements:
|
||||||
|
description: Space-delimited list of requirement files to install.
|
||||||
|
required: false
|
||||||
|
default: "requirements.txt requirements-test.txt"
|
||||||
|
run-db-setup:
|
||||||
|
description: Run database wait and setup scripts when true.
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
db-dry-run:
|
||||||
|
description: Execute setup script dry run before live run when true.
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
steps:
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ inputs.python-version }}
|
||||||
|
- name: Configure apt proxy
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
PROXY_HOST="http://apt-cacher:3142"
|
||||||
|
if ! curl -fsS --connect-timeout 3 "${PROXY_HOST}" >/dev/null; then
|
||||||
|
PROXY_HOST="http://192.168.88.14:3142"
|
||||||
|
fi
|
||||||
|
echo "Using APT proxy ${PROXY_HOST}"
|
||||||
|
{
|
||||||
|
echo "http_proxy=${PROXY_HOST}"
|
||||||
|
echo "https_proxy=${PROXY_HOST}"
|
||||||
|
echo "HTTP_PROXY=${PROXY_HOST}"
|
||||||
|
echo "HTTPS_PROXY=${PROXY_HOST}"
|
||||||
|
} >> "$GITHUB_ENV"
|
||||||
|
sudo tee /etc/apt/apt.conf.d/01proxy >/dev/null <<EOF
|
||||||
|
Acquire::http::Proxy "${PROXY_HOST}";
|
||||||
|
Acquire::https::Proxy "${PROXY_HOST}";
|
||||||
|
EOF
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
requirements="${{ inputs.install-requirements }}"
|
||||||
|
if [ -n "${requirements}" ]; then
|
||||||
|
for requirement in ${requirements}; do
|
||||||
|
if [ -f "${requirement}" ]; then
|
||||||
|
pip install -r "${requirement}"
|
||||||
|
else
|
||||||
|
echo "Requirement file ${requirement} not found" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
- name: Install Playwright browsers
|
||||||
|
if: ${{ inputs.install-playwright == 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python -m playwright install --with-deps
|
||||||
|
- name: Wait for database service
|
||||||
|
if: ${{ inputs.run-db-setup == 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python - <<'PY'
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
|
import psycopg2
|
||||||
|
|
||||||
|
dsn = (
|
||||||
|
f"dbname={os.environ['DATABASE_SUPERUSER_DB']} "
|
||||||
|
f"user={os.environ['DATABASE_SUPERUSER']} "
|
||||||
|
f"password={os.environ['DATABASE_SUPERUSER_PASSWORD']} "
|
||||||
|
f"host={os.environ['DATABASE_HOST']} "
|
||||||
|
f"port={os.environ['DATABASE_PORT']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
for attempt in range(30):
|
||||||
|
try:
|
||||||
|
with psycopg2.connect(dsn):
|
||||||
|
break
|
||||||
|
except psycopg2.OperationalError:
|
||||||
|
time.sleep(2)
|
||||||
|
else:
|
||||||
|
raise SystemExit("Postgres service did not become available")
|
||||||
|
PY
|
||||||
|
- name: Run database setup (dry run)
|
||||||
|
if: ${{ inputs.run-db-setup == 'true' && inputs.db-dry-run == 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
|
||||||
|
- name: Run database setup
|
||||||
|
if: ${{ inputs.run-db-setup == 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
|
||||||
@@ -1,11 +1,16 @@
|
|||||||
name: Build and Push Docker Image
|
name: Build and Push Docker Image
|
||||||
on:
|
on:
|
||||||
push:
|
workflow_run:
|
||||||
|
workflows:
|
||||||
|
- Run Tests
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-and-push:
|
build-and-push:
|
||||||
|
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
env:
|
env:
|
||||||
DEFAULT_BRANCH: main
|
DEFAULT_BRANCH: main
|
||||||
@@ -14,6 +19,8 @@ jobs:
|
|||||||
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
||||||
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
||||||
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
||||||
|
WORKFLOW_RUN_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||||
|
WORKFLOW_RUN_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -26,6 +33,14 @@ jobs:
|
|||||||
event_name="${GITHUB_EVENT_NAME:-}"
|
event_name="${GITHUB_EVENT_NAME:-}"
|
||||||
sha="${GITHUB_SHA:-}"
|
sha="${GITHUB_SHA:-}"
|
||||||
|
|
||||||
|
if [ -z "$ref_name" ] && [ -n "${WORKFLOW_RUN_HEAD_BRANCH:-}" ]; then
|
||||||
|
ref_name="${WORKFLOW_RUN_HEAD_BRANCH}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$sha" ] && [ -n "${WORKFLOW_RUN_HEAD_SHA:-}" ]; then
|
||||||
|
sha="${WORKFLOW_RUN_HEAD_SHA}"
|
||||||
|
fi
|
||||||
|
|
||||||
if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ]; then
|
if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ]; then
|
||||||
echo "on_default=true" >> "$GITHUB_OUTPUT"
|
echo "on_default=true" >> "$GITHUB_OUTPUT"
|
||||||
else
|
else
|
||||||
|
|||||||
@@ -1,11 +1,16 @@
|
|||||||
name: Deploy to Server
|
name: Deploy to Server
|
||||||
on:
|
on:
|
||||||
push:
|
workflow_run:
|
||||||
|
workflows:
|
||||||
|
- Build and Push Docker Image
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
deploy:
|
deploy:
|
||||||
|
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
env:
|
env:
|
||||||
DEFAULT_BRANCH: main
|
DEFAULT_BRANCH: main
|
||||||
@@ -14,6 +19,8 @@ jobs:
|
|||||||
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
|
||||||
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
|
||||||
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
|
||||||
|
WORKFLOW_RUN_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||||
|
WORKFLOW_RUN_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||||
steps:
|
steps:
|
||||||
- name: SSH and deploy
|
- name: SSH and deploy
|
||||||
uses: appleboy/ssh-action@master
|
uses: appleboy/ssh-action@master
|
||||||
@@ -22,7 +29,15 @@ jobs:
|
|||||||
username: ${{ secrets.SSH_USERNAME }}
|
username: ${{ secrets.SSH_USERNAME }}
|
||||||
key: ${{ secrets.SSH_PRIVATE_KEY }}
|
key: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||||
script: |
|
script: |
|
||||||
docker pull ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_ORG }}/${{ env.REGISTRY_IMAGE_NAME }}:latest
|
IMAGE_SHA="${{ env.WORKFLOW_RUN_HEAD_SHA }}"
|
||||||
|
IMAGE_PATH="${{ env.REGISTRY_URL }}/${{ env.REGISTRY_ORG }}/${{ env.REGISTRY_IMAGE_NAME }}"
|
||||||
|
|
||||||
|
if [ -z "$IMAGE_SHA" ]; then
|
||||||
|
echo "Missing workflow run head SHA; aborting deployment." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
docker pull "$IMAGE_PATH:$IMAGE_SHA"
|
||||||
docker stop calminer || true
|
docker stop calminer || true
|
||||||
docker rm calminer || true
|
docker rm calminer || true
|
||||||
docker run -d --name calminer -p 8000:8000 \
|
docker run -d --name calminer -p 8000:8000 \
|
||||||
@@ -33,4 +48,4 @@ jobs:
|
|||||||
-e DATABASE_PASSWORD=${{ secrets.DATABASE_PASSWORD }} \
|
-e DATABASE_PASSWORD=${{ secrets.DATABASE_PASSWORD }} \
|
||||||
-e DATABASE_NAME=${{ secrets.DATABASE_NAME }} \
|
-e DATABASE_NAME=${{ secrets.DATABASE_NAME }} \
|
||||||
-e DATABASE_SCHEMA=${{ secrets.DATABASE_SCHEMA }} \
|
-e DATABASE_SCHEMA=${{ secrets.DATABASE_SCHEMA }} \
|
||||||
${{ secrets.REGISTRY_URL }}/${{ secrets.REGISTRY_USERNAME }}/calminer:latest
|
"$IMAGE_PATH:$IMAGE_SHA"
|
||||||
|
|||||||
@@ -2,7 +2,25 @@ name: Run Tests
|
|||||||
on: [push]
|
on: [push]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
tests:
|
||||||
|
name: ${{ matrix.target }} tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
DATABASE_DRIVER: postgresql
|
||||||
|
DATABASE_HOST: postgres
|
||||||
|
DATABASE_PORT: "5432"
|
||||||
|
DATABASE_NAME: calminer_ci
|
||||||
|
DATABASE_USER: calminer
|
||||||
|
DATABASE_PASSWORD: secret
|
||||||
|
DATABASE_SCHEMA: public
|
||||||
|
DATABASE_SUPERUSER: calminer
|
||||||
|
DATABASE_SUPERUSER_PASSWORD: secret
|
||||||
|
DATABASE_SUPERUSER_DB: calminer_ci
|
||||||
|
DATABASE_URL: postgresql+psycopg2://calminer:secret@postgres:5432/calminer_ci
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
target: [unit, e2e, lint]
|
||||||
services:
|
services:
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
@@ -10,116 +28,24 @@ jobs:
|
|||||||
POSTGRES_DB: calminer_ci
|
POSTGRES_DB: calminer_ci
|
||||||
POSTGRES_USER: calminer
|
POSTGRES_USER: calminer
|
||||||
POSTGRES_PASSWORD: secret
|
POSTGRES_PASSWORD: secret
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
options: >-
|
options: >-
|
||||||
--health-cmd "pg_isready -U calminer -d calminer_ci"
|
--health-cmd "pg_isready -U calminer -d calminer_ci"
|
||||||
--health-interval 10s
|
--health-interval 10s
|
||||||
--health-timeout 5s
|
--health-timeout 5s
|
||||||
--health-retries 10
|
--health-retries 10
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
- name: Set up Python
|
- name: Prepare Python environment
|
||||||
uses: actions/setup-python@v5
|
uses: ./.gitea/actions/setup-python-env
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
install-playwright: ${{ matrix.target == 'e2e' }}
|
||||||
- name: Configure apt proxy
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
PROXY_HOST="http://apt-cacher:3142"
|
|
||||||
if ! curl -fsS --connect-timeout 3 "${PROXY_HOST}" >/dev/null; then
|
|
||||||
PROXY_HOST="http://192.168.88.14:3142"
|
|
||||||
fi
|
|
||||||
echo "Using APT proxy ${PROXY_HOST}"
|
|
||||||
echo "http_proxy=${PROXY_HOST}" >> "$GITHUB_ENV"
|
|
||||||
echo "https_proxy=${PROXY_HOST}" >> "$GITHUB_ENV"
|
|
||||||
echo "HTTP_PROXY=${PROXY_HOST}" >> "$GITHUB_ENV"
|
|
||||||
echo "HTTPS_PROXY=${PROXY_HOST}" >> "$GITHUB_ENV"
|
|
||||||
sudo tee /etc/apt/apt.conf.d/01proxy >/dev/null <<EOF
|
|
||||||
Acquire::http::Proxy "${PROXY_HOST}";
|
|
||||||
Acquire::https::Proxy "${PROXY_HOST}";
|
|
||||||
EOF
|
|
||||||
# - name: Cache pip
|
|
||||||
# uses: actions/cache@v4
|
|
||||||
# with:
|
|
||||||
# path: ~/.cache/pip
|
|
||||||
# key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'requirements-test.txt') }}
|
|
||||||
# restore-keys: |
|
|
||||||
# ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
|
|
||||||
# ${{ runner.os }}-pip-
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
pip install -r requirements.txt
|
|
||||||
pip install -r requirements-test.txt
|
|
||||||
- name: Install Playwright browsers
|
|
||||||
run: |
|
|
||||||
python -m playwright install --with-deps
|
|
||||||
- name: Wait for database service
|
|
||||||
env:
|
|
||||||
DATABASE_DRIVER: postgresql
|
|
||||||
DATABASE_HOST: postgres
|
|
||||||
DATABASE_PORT: "5432"
|
|
||||||
DATABASE_NAME: calminer_ci
|
|
||||||
DATABASE_USER: calminer
|
|
||||||
DATABASE_PASSWORD: secret
|
|
||||||
DATABASE_SCHEMA: public
|
|
||||||
DATABASE_SUPERUSER: calminer
|
|
||||||
DATABASE_SUPERUSER_PASSWORD: secret
|
|
||||||
DATABASE_SUPERUSER_DB: calminer_ci
|
|
||||||
run: |
|
|
||||||
python - <<'PY'
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
|
|
||||||
import psycopg2
|
|
||||||
|
|
||||||
dsn = (
|
|
||||||
f"dbname={os.environ['DATABASE_SUPERUSER_DB']} "
|
|
||||||
f"user={os.environ['DATABASE_SUPERUSER']} "
|
|
||||||
f"password={os.environ['DATABASE_SUPERUSER_PASSWORD']} "
|
|
||||||
f"host={os.environ['DATABASE_HOST']} "
|
|
||||||
f"port={os.environ['DATABASE_PORT']}"
|
|
||||||
)
|
|
||||||
|
|
||||||
for attempt in range(30):
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(dsn):
|
|
||||||
break
|
|
||||||
except psycopg2.OperationalError:
|
|
||||||
time.sleep(2)
|
|
||||||
else:
|
|
||||||
raise SystemExit("Postgres service did not become available")
|
|
||||||
PY
|
|
||||||
- name: Run database setup (dry run)
|
|
||||||
env:
|
|
||||||
DATABASE_DRIVER: postgresql
|
|
||||||
DATABASE_HOST: postgres
|
|
||||||
DATABASE_PORT: "5432"
|
|
||||||
DATABASE_NAME: calminer_ci
|
|
||||||
DATABASE_USER: calminer
|
|
||||||
DATABASE_PASSWORD: secret
|
|
||||||
DATABASE_SCHEMA: public
|
|
||||||
DATABASE_SUPERUSER: calminer
|
|
||||||
DATABASE_SUPERUSER_PASSWORD: secret
|
|
||||||
DATABASE_SUPERUSER_DB: calminer_ci
|
|
||||||
run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
|
|
||||||
- name: Run database setup
|
|
||||||
env:
|
|
||||||
DATABASE_DRIVER: postgresql
|
|
||||||
DATABASE_HOST: postgres
|
|
||||||
DATABASE_PORT: "5432"
|
|
||||||
DATABASE_NAME: calminer_ci
|
|
||||||
DATABASE_USER: calminer
|
|
||||||
DATABASE_PASSWORD: secret
|
|
||||||
DATABASE_SCHEMA: public
|
|
||||||
DATABASE_SUPERUSER: calminer
|
|
||||||
DATABASE_SUPERUSER_PASSWORD: secret
|
|
||||||
DATABASE_SUPERUSER_DB: calminer_ci
|
|
||||||
run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
env:
|
run: |
|
||||||
DATABASE_URL: postgresql+psycopg2://calminer:secret@postgres:5432/calminer_ci
|
if [ "${{ matrix.target }}" = "unit" ]; then
|
||||||
DATABASE_SCHEMA: public
|
pytest tests/unit
|
||||||
run: pytest
|
elif [ "${{ matrix.target }}" = "lint" ]; then
|
||||||
|
ruff check .
|
||||||
|
else
|
||||||
|
pytest tests/e2e
|
||||||
|
fi
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -45,3 +45,6 @@ logs/
|
|||||||
# SQLite database
|
# SQLite database
|
||||||
*.sqlite3
|
*.sqlite3
|
||||||
test*.db
|
test*.db
|
||||||
|
|
||||||
|
# Docker files
|
||||||
|
.runner
|
||||||
|
|||||||
8
.prettierrc
Normal file
8
.prettierrc
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"semi": true,
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5",
|
||||||
|
"printWidth": 80,
|
||||||
|
"tabWidth": 2,
|
||||||
|
"useTabs": false
|
||||||
|
}
|
||||||
@@ -21,13 +21,18 @@ A range of features are implemented to support these functionalities.
|
|||||||
- **Unified UI Shell**: Server-rendered templates extend a shared base layout with a persistent left sidebar linking scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting views.
|
- **Unified UI Shell**: Server-rendered templates extend a shared base layout with a persistent left sidebar linking scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting views.
|
||||||
- **Operations Overview Dashboard**: The root route (`/`) surfaces cross-scenario KPIs, charts, and maintenance reminders with a one-click refresh backed by aggregated loaders.
|
- **Operations Overview Dashboard**: The root route (`/`) surfaces cross-scenario KPIs, charts, and maintenance reminders with a one-click refresh backed by aggregated loaders.
|
||||||
- **Theming Tokens**: Shared CSS variables in `static/css/main.css` centralize the UI color palette for consistent styling and rapid theme tweaks.
|
- **Theming Tokens**: Shared CSS variables in `static/css/main.css` centralize the UI color palette for consistent styling and rapid theme tweaks.
|
||||||
- **Modular Frontend Scripts**: Page-specific interactions now live in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
|
- **Settings Center**: The Settings landing page exposes visual theme controls and links to currency administration, backed by persisted application settings and environment overrides.
|
||||||
|
- **Modular Frontend Scripts**: Page-specific interactions in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
|
||||||
- **Monte Carlo Simulation (in progress)**: Services and routes are scaffolded for future stochastic analysis.
|
- **Monte Carlo Simulation (in progress)**: Services and routes are scaffolded for future stochastic analysis.
|
||||||
|
|
||||||
## Documentation & quickstart
|
## Documentation & quickstart
|
||||||
|
|
||||||
This repository contains detailed developer and architecture documentation in the `docs/` folder.
|
This repository contains detailed developer and architecture documentation in the `docs/` folder.
|
||||||
|
|
||||||
|
### Settings overview
|
||||||
|
|
||||||
|
The Settings page (`/ui/settings`) lets administrators adjust global theme colors stored in the `application_setting` table. Changes are instantly applied across the UI. Environment variables prefixed with `CALMINER_THEME_` (for example, `CALMINER_THEME_COLOR_PRIMARY`) automatically override individual CSS variables and render as read-only in the form, ensuring deployment-time overrides take precedence while remaining visible to operators.
|
||||||
|
|
||||||
[Quickstart](docs/quickstart.md) contains developer quickstart, migrations, testing and current status.
|
[Quickstart](docs/quickstart.md) contains developer quickstart, migrations, testing and current status.
|
||||||
|
|
||||||
Key architecture documents: see [architecture](docs/architecture/README.md) for the arc42-based architecture documentation.
|
Key architecture documents: see [architecture](docs/architecture/README.md) for the arc42-based architecture documentation.
|
||||||
|
|||||||
@@ -56,3 +56,11 @@ DATABASE_URL = _build_database_url()
|
|||||||
engine = create_engine(DATABASE_URL, echo=True, future=True)
|
engine = create_engine(DATABASE_URL, echo=True, future=True)
|
||||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||||
Base = declarative_base()
|
Base = declarative_base()
|
||||||
|
|
||||||
|
|
||||||
|
def get_db():
|
||||||
|
db = SessionLocal()
|
||||||
|
try:
|
||||||
|
yield db
|
||||||
|
finally:
|
||||||
|
db.close()
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
|
|||||||
---
|
---
|
||||||
title: "05 — Building Block View"
|
title: '05 — Building Block View'
|
||||||
description: "Explain the static structure: modules, components, services and their relationships."
|
description: 'Explain the static structure: modules, components, services and their relationships.'
|
||||||
status: draft
|
status: draft
|
||||||
---
|
---
|
||||||
|
|
||||||
|
<!-- markdownlint-disable-next-line MD025 -->
|
||||||
|
|
||||||
# 05 — Building Block View
|
# 05 — Building Block View
|
||||||
|
|
||||||
## Architecture overview
|
## Architecture overview
|
||||||
@@ -25,6 +27,7 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
|
|||||||
- leveraging a shared dependency module (`routes/dependencies.get_db`) for SQLAlchemy session management.
|
- leveraging a shared dependency module (`routes/dependencies.get_db`) for SQLAlchemy session management.
|
||||||
- **Models** (`models/`): SQLAlchemy ORM models representing database tables and relationships, encapsulating domain entities like Scenario, CapEx, OpEx, Consumption, ProductionOutput, Equipment, Maintenance, and SimulationResult.
|
- **Models** (`models/`): SQLAlchemy ORM models representing database tables and relationships, encapsulating domain entities like Scenario, CapEx, OpEx, Consumption, ProductionOutput, Equipment, Maintenance, and SimulationResult.
|
||||||
- **Services** (`services/`): business logic layer that processes data, performs calculations, and interacts with models. Key services include reporting calculations and Monte Carlo simulation scaffolding.
|
- **Services** (`services/`): business logic layer that processes data, performs calculations, and interacts with models. Key services include reporting calculations and Monte Carlo simulation scaffolding.
|
||||||
|
- `services/settings.py`: manages application settings backed by the `application_setting` table, including CSS variable defaults, persistence, and environment-driven overrides that surface in both the API and UI.
|
||||||
- **Database** (`config/database.py`): sets up the SQLAlchemy engine and session management for PostgreSQL interactions.
|
- **Database** (`config/database.py`): sets up the SQLAlchemy engine and session management for PostgreSQL interactions.
|
||||||
|
|
||||||
### Frontend
|
### Frontend
|
||||||
@@ -32,12 +35,152 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
|
|||||||
- **Templates** (`templates/`): Jinja2 templates for server-rendered HTML views, extending a shared base layout with a persistent sidebar for navigation.
|
- **Templates** (`templates/`): Jinja2 templates for server-rendered HTML views, extending a shared base layout with a persistent sidebar for navigation.
|
||||||
- **Static Assets** (`static/`): CSS and JavaScript files for styling and interactivity. Shared CSS variables in `static/css/main.css` define the color palette, while page-specific JS modules in `static/js/` handle dynamic behaviors.
|
- **Static Assets** (`static/`): CSS and JavaScript files for styling and interactivity. Shared CSS variables in `static/css/main.css` define the color palette, while page-specific JS modules in `static/js/` handle dynamic behaviors.
|
||||||
- **Reusable partials** (`templates/partials/components.html`): macro library that standardises select inputs, feedback/empty states, and table wrappers so pages remain consistent while keeping DOM hooks stable for existing JavaScript modules.
|
- **Reusable partials** (`templates/partials/components.html`): macro library that standardises select inputs, feedback/empty states, and table wrappers so pages remain consistent while keeping DOM hooks stable for existing JavaScript modules.
|
||||||
|
- `templates/settings.html`: Settings hub that renders theme controls and environment override tables using metadata provided by `routes/ui.py`.
|
||||||
|
- `static/js/settings.js`: applies client-side validation, form submission, and live CSS updates for theme changes, respecting environment-managed variables returned by the API.
|
||||||
|
|
||||||
### Middleware & Utilities
|
### Middleware & Utilities
|
||||||
|
|
||||||
- **Middleware** (`middleware/validation.py`): applies JSON validation before requests reach routers.
|
- **Middleware** (`middleware/validation.py`): applies JSON validation before requests reach routers.
|
||||||
- **Testing** (`tests/unit/`): pytest suite covering route and service behavior, including UI rendering checks and negative-path router validation tests to ensure consistent HTTP error semantics. Playwright end-to-end coverage is planned for core smoke flows (dashboard load, scenario inputs, reporting) and will attach in CI once scaffolding is completed.
|
- **Testing** (`tests/unit/`): pytest suite covering route and service behavior, including UI rendering checks and negative-path router validation tests to ensure consistent HTTP error semantics. Playwright end-to-end coverage is planned for core smoke flows (dashboard load, scenario inputs, reporting) and will attach in CI once scaffolding is completed.
|
||||||
|
|
||||||
|
### Component Diagram
|
||||||
|
|
||||||
|
# System Architecture — Mermaid Diagram
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
graph LR
|
||||||
|
%% Direction
|
||||||
|
%% LR = left-to-right for a wide architecture view
|
||||||
|
|
||||||
|
%% === Clients ===
|
||||||
|
U["User (Browser)"]
|
||||||
|
|
||||||
|
%% === Frontend ===
|
||||||
|
subgraph FE[Frontend]
|
||||||
|
TPL["Jinja2 Templates\n(templates/)\n• base layout + sidebar"]
|
||||||
|
PARTS["Reusable Partials\n(templates/partials/components.html)\n• inputs • empty states • table wrappers"]
|
||||||
|
STATIC["Static Assets\n(static/)\n• CSS: static/css/main.css (palette via CSS vars)\n• JS: static/js/*.js (page modules)"]
|
||||||
|
SETPAGE["Settings View\n(templates/settings.html)"]
|
||||||
|
SETJS["Settings Logic\n(static/js/settings.js)\n• validation • submit • live CSS updates"]
|
||||||
|
end
|
||||||
|
|
||||||
|
%% === Backend ===
|
||||||
|
subgraph BE[Backend FastAPI]
|
||||||
|
MAIN["FastAPI App\n(main.py)\n• routers • middleware • startup/shutdown"]
|
||||||
|
|
||||||
|
subgraph ROUTES[Routers]
|
||||||
|
R_SCN["scenarios"]
|
||||||
|
R_PAR["parameters"]
|
||||||
|
R_CST["costs"]
|
||||||
|
R_CONS["consumption"]
|
||||||
|
R_PROD["production"]
|
||||||
|
R_EQP["equipment"]
|
||||||
|
R_MNT["maintenance"]
|
||||||
|
R_SIM["simulations"]
|
||||||
|
R_REP["reporting"]
|
||||||
|
R_UI["ui.py (metadata for UI)"]
|
||||||
|
DEP["dependencies.get_db\n(shared SQLAlchemy session)"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph SRV[Services]
|
||||||
|
S_BLL["Business Logic Layer\n• orchestrates models + calc"]
|
||||||
|
S_REP["Reporting Calculations"]
|
||||||
|
S_SIM["Monte Carlo\n(simulation scaffolding)"]
|
||||||
|
S_SET["Settings Manager\n(services/settings.py)\n• defaults via CSS vars\n• persistence in DB\n• env overrides\n• surfaces to API & UI"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph MOD[Models]
|
||||||
|
M_SCN["Scenario"]
|
||||||
|
M_CAP["CapEx"]
|
||||||
|
M_OPEX["OpEx"]
|
||||||
|
M_CONS["Consumption"]
|
||||||
|
M_PROD["ProductionOutput"]
|
||||||
|
M_EQP["Equipment"]
|
||||||
|
M_MNT["Maintenance"]
|
||||||
|
M_SIMR["SimulationResult"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph DB[Database Layer]
|
||||||
|
CFG["config/database.py\n(SQLAlchemy engine & sessions)"]
|
||||||
|
PG[("PostgreSQL")]
|
||||||
|
APPSET["application_setting table"]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
%% === Middleware & Utilities ===
|
||||||
|
subgraph MW[Middleware & Utilities]
|
||||||
|
VAL["JSON Validation Middleware\n(middleware/validation.py)"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph TEST[Testing]
|
||||||
|
UNIT["pytest unit tests\n(tests/unit/)\n• routes • services • UI rendering\n• negative-path validation"]
|
||||||
|
E2E["Playwright E2E (planned)\n• dashboard • scenario inputs • reporting\n• attach in CI"]
|
||||||
|
end
|
||||||
|
|
||||||
|
%% ===================== Edges / Flows =====================
|
||||||
|
%% User to Frontend/Backend
|
||||||
|
U -->|HTTP GET| MAIN
|
||||||
|
U --> TPL
|
||||||
|
TPL -->|server-rendered HTML| U
|
||||||
|
STATIC --> U
|
||||||
|
PARTS --> TPL
|
||||||
|
SETPAGE --> U
|
||||||
|
SETJS --> U
|
||||||
|
|
||||||
|
%% Frontend to Routers (AJAX/form submits)
|
||||||
|
SETJS -->|fetch/POST| R_UI
|
||||||
|
TPL -->|form submit / fetch| ROUTES
|
||||||
|
|
||||||
|
%% FastAPI app wiring and middleware
|
||||||
|
VAL --> MAIN
|
||||||
|
MAIN --> ROUTES
|
||||||
|
|
||||||
|
%% Routers to Services
|
||||||
|
ROUTES -->|calls| SRV
|
||||||
|
R_REP -->|calc| S_REP
|
||||||
|
R_SIM -->|run| S_SIM
|
||||||
|
R_UI -->|read/write settings meta| S_SET
|
||||||
|
|
||||||
|
%% Services to Models & DB
|
||||||
|
SRV --> MOD
|
||||||
|
MOD --> CFG
|
||||||
|
CFG --> PG
|
||||||
|
|
||||||
|
%% Settings manager persistence path
|
||||||
|
S_SET -->|persist/read| APPSET
|
||||||
|
APPSET --- PG
|
||||||
|
|
||||||
|
%% Shared DB session dependency
|
||||||
|
DEP -. provides .-> ROUTES
|
||||||
|
DEP -. session .-> SRV
|
||||||
|
|
||||||
|
%% Model entities mapping
|
||||||
|
S_BLL --> M_SCN & M_CAP & M_OPEX & M_CONS & M_PROD & M_EQP & M_MNT & M_SIMR
|
||||||
|
|
||||||
|
%% Testing coverage
|
||||||
|
UNIT --> ROUTES
|
||||||
|
UNIT --> SRV
|
||||||
|
UNIT --> TPL
|
||||||
|
UNIT --> VAL
|
||||||
|
E2E --> U
|
||||||
|
E2E --> MAIN
|
||||||
|
|
||||||
|
%% Legend
|
||||||
|
classDef store fill:#fff,stroke:#555,stroke-width:1px;
|
||||||
|
class PG store;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Notes**
|
||||||
|
|
||||||
|
- Arrows represent primary data/command flow. Dashed arrows denote shared dependencies (injected SQLAlchemy session).
|
||||||
|
- The settings pipeline shows how environment overrides and DB-backed defaults propagate to both API and UI.
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
## Module Map (code)
|
## Module Map (code)
|
||||||
|
|
||||||
- `scenario.py`: central scenario entity with relationships to cost, consumption, production, equipment, maintenance, and simulation results.
|
- `scenario.py`: central scenario entity with relationships to cost, consumption, production, equipment, maintenance, and simulation results.
|
||||||
@@ -45,6 +188,7 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
|
|||||||
- `consumption.py`, `production_output.py`: operational data tables.
|
- `consumption.py`, `production_output.py`: operational data tables.
|
||||||
- `equipment.py`, `maintenance.py`: asset management models.
|
- `equipment.py`, `maintenance.py`: asset management models.
|
||||||
- `simulation_result.py`: stores Monte Carlo iteration outputs.
|
- `simulation_result.py`: stores Monte Carlo iteration outputs.
|
||||||
|
- `application_setting.py`: persists editable application configuration, currently focused on theme variables but designed to store future settings categories.
|
||||||
|
|
||||||
## Service Layer
|
## Service Layer
|
||||||
|
|
||||||
|
|||||||
88
docs/architecture/05_frontend/05_03_theming.md
Normal file
88
docs/architecture/05_frontend/05_03_theming.md
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# Theming
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
CalMiner uses a centralized theming system based on CSS custom properties (variables) to ensure consistent styling across the application. The theme is stored in the database and can be customized through environment variables or the UI settings page.
|
||||||
|
|
||||||
|
## Default Theme Settings
|
||||||
|
|
||||||
|
The default theme provides a light, professional color palette suitable for business applications. The colors are defined as CSS custom properties and stored in the `application_setting` table with category "theme".
|
||||||
|
|
||||||
|
### Color Palette
|
||||||
|
|
||||||
|
| CSS Variable | Default Value | Description |
|
||||||
|
| --------------------------- | ------------------------ | ------------------------ |
|
||||||
|
| `--color-background` | `#f4f5f7` | Main background color |
|
||||||
|
| `--color-surface` | `#ffffff` | Surface/card background |
|
||||||
|
| `--color-text-primary` | `#2a1f33` | Primary text color |
|
||||||
|
| `--color-text-secondary` | `#624769` | Secondary text color |
|
||||||
|
| `--color-text-muted` | `#64748b` | Muted text color |
|
||||||
|
| `--color-text-subtle` | `#94a3b8` | Subtle text color |
|
||||||
|
| `--color-text-invert` | `#ffffff` | Text on dark backgrounds |
|
||||||
|
| `--color-text-dark` | `#0f172a` | Dark text for contrast |
|
||||||
|
| `--color-text-strong` | `#111827` | Strong/bold text |
|
||||||
|
| `--color-primary` | `#5f320d` | Primary brand color |
|
||||||
|
| `--color-primary-strong` | `#7e4c13` | Stronger primary |
|
||||||
|
| `--color-primary-stronger` | `#837c15` | Strongest primary |
|
||||||
|
| `--color-accent` | `#bff838` | Accent/highlight color |
|
||||||
|
| `--color-border` | `#e2e8f0` | Default border color |
|
||||||
|
| `--color-border-strong` | `#cbd5e1` | Strong border color |
|
||||||
|
| `--color-highlight` | `#eef2ff` | Highlight background |
|
||||||
|
| `--color-panel-shadow` | `rgba(15, 23, 42, 0.08)` | Subtle shadow |
|
||||||
|
| `--color-panel-shadow-deep` | `rgba(15, 23, 42, 0.12)` | Deeper shadow |
|
||||||
|
| `--color-surface-alt` | `#f8fafc` | Alternative surface |
|
||||||
|
| `--color-success` | `#047857` | Success state color |
|
||||||
|
| `--color-error` | `#b91c1c` | Error state color |
|
||||||
|
|
||||||
|
## Customization
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
Theme colors can be overridden using environment variables with the prefix `CALMINER_THEME_`. For example:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export CALMINER_THEME_COLOR_BACKGROUND="#000000"
|
||||||
|
export CALMINER_THEME_COLOR_ACCENT="#ff0000"
|
||||||
|
```
|
||||||
|
|
||||||
|
The variable names are derived by:
|
||||||
|
|
||||||
|
1. Removing the `--` prefix
|
||||||
|
2. Converting to uppercase
|
||||||
|
3. Replacing `-` with `_`
|
||||||
|
4. Adding `CALMINER_THEME_` prefix
|
||||||
|
|
||||||
|
### Database Storage
|
||||||
|
|
||||||
|
Settings are stored in the `application_setting` table with:
|
||||||
|
|
||||||
|
- `category`: "theme"
|
||||||
|
- `value_type`: "color"
|
||||||
|
- `is_editable`: true
|
||||||
|
|
||||||
|
### UI Settings
|
||||||
|
|
||||||
|
Users can modify theme colors through the settings page at `/ui/settings`.
|
||||||
|
|
||||||
|
## Implementation
|
||||||
|
|
||||||
|
The theming system is implemented in:
|
||||||
|
|
||||||
|
- `services/settings.py`: Color management and defaults
|
||||||
|
- `routes/settings.py`: API endpoints for theme settings
|
||||||
|
- `static/css/main.css`: CSS variable definitions
|
||||||
|
- `templates/settings.html`: UI for theme customization
|
||||||
|
|
||||||
|
## Seeding
|
||||||
|
|
||||||
|
Default theme settings are seeded during database setup using the seed script:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python scripts/seed_data.py --theme
|
||||||
|
```
|
||||||
|
|
||||||
|
Or as part of defaults:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python scripts/seed_data.py --defaults
|
||||||
|
```
|
||||||
218
docs/architecture/07_deployment/07_01_testing_ci.md
Normal file
218
docs/architecture/07_deployment/07_01_testing_ci.md
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
# Testing, CI and Quality Assurance
|
||||||
|
|
||||||
|
This chapter centralizes the project's testing strategy, CI configuration, and quality targets.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
CalMiner uses a combination of unit, integration, and end-to-end tests to ensure quality.
|
||||||
|
|
||||||
|
### Frameworks
|
||||||
|
|
||||||
|
- Backend: pytest for unit and integration tests.
|
||||||
|
- Frontend: pytest with Playwright for E2E tests.
|
||||||
|
- Database: pytest fixtures with psycopg2 for DB tests.
|
||||||
|
|
||||||
|
### Test Types
|
||||||
|
|
||||||
|
- Unit Tests: Test individual functions/modules.
|
||||||
|
- Integration Tests: Test API endpoints and DB interactions.
|
||||||
|
- E2E Tests: Playwright for full user flows.
|
||||||
|
|
||||||
|
### CI/CD
|
||||||
|
|
||||||
|
- Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`.
|
||||||
|
- `test.yml` runs on every push, provisions a temporary Postgres 16 service, waits for readiness, executes the setup script in dry-run and live modes, then fans out into parallel matrix jobs for unit (`pytest tests/unit`) and end-to-end (`pytest tests/e2e`) suites. Playwright browsers install only for the E2E job.
|
||||||
|
- `build-and-push.yml` runs only after the **Run Tests** workflow finishes successfully (triggered via `workflow_run` on `main`). Once tests pass, it builds the Docker image with `docker/build-push-action@v2`, reuses cache-backed layers, and pushes to the Gitea registry.
|
||||||
|
- `deploy.yml` runs only after the build workflow reports success on `main`. It connects to the target host (via `appleboy/ssh-action`), pulls the Docker image tagged with the build commit SHA, and restarts the container with that exact image reference.
|
||||||
|
- Mandatory secrets: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
|
||||||
|
- Run tests on pull requests to shared branches; enforce coverage target ≥80% (pytest-cov).
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
- Unit: `pytest tests/unit/`
|
||||||
|
- E2E: `pytest tests/e2e/`
|
||||||
|
- All: `pytest`
|
||||||
|
|
||||||
|
### Test Directory Structure
|
||||||
|
|
||||||
|
Organize tests under the `tests/` directory mirroring the application structure:
|
||||||
|
|
||||||
|
```text
|
||||||
|
tests/
|
||||||
|
unit/
|
||||||
|
test_<module>.py
|
||||||
|
e2e/
|
||||||
|
test_<flow>.py
|
||||||
|
fixtures/
|
||||||
|
conftest.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fixtures and Test Data
|
||||||
|
|
||||||
|
- Define reusable fixtures in `tests/fixtures/conftest.py`.
|
||||||
|
- Use temporary in-memory databases or isolated schemas for DB tests.
|
||||||
|
- Load sample data via fixtures for consistent test environments.
|
||||||
|
- Leverage the `seeded_ui_data` fixture in `tests/unit/conftest.py` to populate scenarios with related cost, maintenance, and simulation records for deterministic UI route checks.
|
||||||
|
|
||||||
|
### E2E (Playwright) Tests
|
||||||
|
|
||||||
|
The E2E test suite, located in `tests/e2e/`, uses Playwright to simulate user interactions in a live browser environment. These tests are designed to catch issues in the UI, frontend-backend integration, and overall application flow.
|
||||||
|
|
||||||
|
#### Fixtures
|
||||||
|
|
||||||
|
- `live_server`: A session-scoped fixture that launches the FastAPI application in a separate process, making it accessible to the browser.
|
||||||
|
- `playwright_instance`, `browser`, `page`: Standard `pytest-playwright` fixtures for managing the Playwright instance, browser, and individual pages.
|
||||||
|
|
||||||
|
#### Smoke Tests
|
||||||
|
|
||||||
|
- UI Page Loading: `test_smoke.py` contains a parameterized test that systematically navigates to all UI routes to ensure they load without errors, have the correct title, and display a primary heading.
|
||||||
|
- Form Submissions: Each major form in the application has a corresponding test file (e.g., `test_scenarios.py`, `test_costs.py`) that verifies: page loads, create item by filling the form, success message, and UI updates.
|
||||||
|
|
||||||
|
### Running E2E Tests
|
||||||
|
|
||||||
|
To run the Playwright tests:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pytest tests/e2e/
|
||||||
|
```
|
||||||
|
|
||||||
|
To run headed mode:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pytest tests/e2e/ --headed
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mocking and Dependency Injection
|
||||||
|
|
||||||
|
- Use `unittest.mock` to mock external dependencies.
|
||||||
|
- Inject dependencies via function parameters or FastAPI's dependency overrides in tests.
|
||||||
|
|
||||||
|
### Code Coverage
|
||||||
|
|
||||||
|
- Install `pytest-cov` to generate coverage reports.
|
||||||
|
- Run with coverage: `pytest --cov --cov-report=term` (use `--cov-report=html` when visualizing hotspots).
|
||||||
|
- Target 95%+ overall coverage. Focus on historically low modules: `services/simulation.py`, `services/reporting.py`, `middleware/validation.py`, and `routes/ui.py`.
|
||||||
|
- Latest snapshot (2025-10-21): `pytest --cov=. --cov-report=term-missing` returns **91%** overall coverage.
|
||||||
|
|
||||||
|
### CI Integration
|
||||||
|
|
||||||
|
`test.yml` encapsulates the steps below:
|
||||||
|
|
||||||
|
- Check out the repository and set up Python 3.10.
|
||||||
|
- Configure the runner's apt proxy (if available), install project dependencies (requirements + test extras), and download Playwright browsers.
|
||||||
|
- Run `pytest` (extend with `--cov` flags when enforcing coverage).
|
||||||
|
|
||||||
|
> The pip cache step is temporarily disabled in `test.yml` until the self-hosted cache service is exposed (see `docs/ci-cache-troubleshooting.md`).
|
||||||
|
|
||||||
|
`build-and-push.yml` adds:
|
||||||
|
|
||||||
|
- Registry login using repository secrets.
|
||||||
|
- Docker image build/push with GHA cache storage (`cache-from/cache-to` set to `type=gha`).
|
||||||
|
|
||||||
|
`deploy.yml` handles:
|
||||||
|
|
||||||
|
- SSH into the deployment host.
|
||||||
|
- Pull the tagged image from the registry.
|
||||||
|
- Stop, remove, and relaunch the `calminer` container exposing port 8000.
|
||||||
|
|
||||||
|
When adding new workflows, mirror this structure to ensure secrets, caching, and deployment steps remain aligned with the production environment.
|
||||||
|
|
||||||
|
## Workflow Optimization Opportunities
|
||||||
|
|
||||||
|
### `test.yml`
|
||||||
|
|
||||||
|
- Run the apt-proxy setup once via a composite action or preconfigured runner image if additional matrix jobs are added.
|
||||||
|
- Collapse dependency installation into a single `pip install -r requirements-test.txt` call (includes base requirements) once caching is restored.
|
||||||
|
- Investigate caching or pre-baking Playwright browser binaries to eliminate >650 MB cold downloads per run.
|
||||||
|
|
||||||
|
### `build-and-push.yml`
|
||||||
|
|
||||||
|
- Skip QEMU setup or explicitly constrain Buildx to linux/amd64 to reduce startup time.
|
||||||
|
- Enable `cache-from` / `cache-to` settings (registry or `type=gha`) to reuse Docker build layers between runs.
|
||||||
|
|
||||||
|
### `deploy.yml`
|
||||||
|
|
||||||
|
- Extract deployment script into a reusable shell script or compose file to minimize inline secrets and ease multi-environment scaling.
|
||||||
|
- Add a post-deploy health check (e.g., `curl` readiness probe) before declaring success.
|
||||||
|
|
||||||
|
### Priority Overview
|
||||||
|
|
||||||
|
1. Restore shared caching for Python wheels and Playwright browsers once infrastructure exposes the cache service (highest impact on runtime and bandwidth; requires coordination with CI owners).
|
||||||
|
2. Enable Docker layer caching in `build-and-push.yml` to shorten build cycles (medium effort, immediate benefit to release workflows).
|
||||||
|
3. Add post-deploy health verification to `deploy.yml` (low effort, improves confidence in automation).
|
||||||
|
4. Streamline redundant setup steps in `test.yml` (medium effort once cache strategy is in place; consider composite actions or base image updates).
|
||||||
|
|
||||||
|
### Setup Consolidation Opportunities
|
||||||
|
|
||||||
|
- `Run Tests` matrix jobs each execute the apt proxy configuration, pip installs, database wait, and setup scripts. A composite action or shell script wrapper could centralize these routines and parameterize target-specific behavior (unit vs e2e) to avoid copy/paste maintenance as additional jobs (lint, type check) are introduced.
|
||||||
|
- Both the test and build workflows perform a `checkout` step; while unavoidable per workflow, shared git submodules or sparse checkout rules could be encapsulated in a composite action to keep options consistent.
|
||||||
|
- The database setup script currently runs twice (dry-run and live) for every matrix leg. Evaluate whether the dry-run remains necessary once migrations stabilize; if retained, consider adding an environment variable toggle to skip redundant seed operations for read-only suites (e.g., lint).
|
||||||
|
|
||||||
|
### Proposed Shared Setup Action
|
||||||
|
|
||||||
|
- Location: `.gitea/actions/setup-python-env/action.yml` (composite action).
|
||||||
|
- Inputs:
|
||||||
|
- `python-version` (default `3.10`): forwarded to `actions/setup-python`.
|
||||||
|
- `install-playwright` (default `false`): when `true`, run `python -m playwright install --with-deps`.
|
||||||
|
- `install-requirements` (default `requirements.txt requirements-test.txt`): space-delimited list pip installs iterate over.
|
||||||
|
- `run-db-setup` (default `true`): toggles database wait + setup scripts.
|
||||||
|
- `db-dry-run` (default `true`): controls whether the dry-run invocation executes.
|
||||||
|
- Steps encapsulated:
|
||||||
|
1. Set up Python via `actions/setup-python@v5` using provided version.
|
||||||
|
2. Configure apt proxy via shared shell snippet (with graceful fallback when proxy offline).
|
||||||
|
3. Iterate over requirement files and execute `pip install -r <file>`.
|
||||||
|
4. If `install-playwright == true`, install browsers.
|
||||||
|
5. If `run-db-setup == true`, run the wait-for-Postgres python snippet and call `scripts/setup_database.py`, honoring `db-dry-run` toggle.
|
||||||
|
- Usage sketch (in `test.yml`):
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Prepare Python environment
|
||||||
|
uses: ./.gitea/actions/setup-python-env
|
||||||
|
with:
|
||||||
|
install-playwright: ${{ matrix.target == 'e2e' }}
|
||||||
|
db-dry-run: true
|
||||||
|
```
|
||||||
|
|
||||||
|
- Benefits: centralizes proxy logic and dependency installs, reduces duplication across matrix jobs, and keeps future lint/type-check jobs lightweight by disabling database setup.
|
||||||
|
- Implementation status: action available at `.gitea/actions/setup-python-env` and consumed by `test.yml`; extend to additional workflows as they adopt the shared routine.
|
||||||
|
- Obsolete steps removed: individual apt proxy, dependency install, Playwright, and database setup commands pruned from `test.yml` once the composite action was integrated.
|
||||||
|
|
||||||
|
## CI Owner Coordination Notes
|
||||||
|
|
||||||
|
### Key Findings
|
||||||
|
|
||||||
|
- Self-hosted runner: ASUS System Product Name chassis with AMD Ryzen 7 7700X (8 physical cores / 16 threads) and 63.2 GB usable RAM; `act_runner` configuration not overridden, so only one workflow job runs concurrently today.
|
||||||
|
- Unit test matrix job: completes 117 pytest cases in roughly 4.1 seconds after Postgres spins up; Docker services consume ~150 MB for `postgres:16-alpine`, with minimal sustained CPU load once tests begin.
|
||||||
|
- End-to-end matrix job: `pytest tests/e2e` averages 21-22 seconds of execution, but a cold run downloads ~179 MB of apt packages plus ~470 MB of Playwright browser bundles (Chromium, Firefox, WebKit, FFmpeg), exceeding 650 MB network transfer and adding several gigabytes of disk writes if caches are absent.
|
||||||
|
- Both jobs reuse existing Python package caches when available; absent a shared cache service, repeated Playwright installs remain the dominant cost driver for cold executions.
|
||||||
|
|
||||||
|
### Open Questions
|
||||||
|
|
||||||
|
- Can we raise the runner concurrency above the default single job, or provision an additional runner, so the test matrix can execute without serializing queued workflows?
|
||||||
|
- Is there a central cache or artifact service available for Python wheels and Playwright browser bundles to avoid ~650 MB downloads on cold starts?
|
||||||
|
- Are we permitted to bake Playwright browsers into the base runner image, or should we pursue a shared cache/proxy solution instead?
|
||||||
|
|
||||||
|
### Outreach Draft
|
||||||
|
|
||||||
|
```text
|
||||||
|
Subject: CalMiner CI parallelization support
|
||||||
|
|
||||||
|
Hi <CI Owner>,
|
||||||
|
|
||||||
|
We recently updated the CalMiner test workflow to fan out unit and Playwright E2E suites in parallel. While validating the change, we gathered the following:
|
||||||
|
|
||||||
|
- Runner host: ASUS System Product Name with AMD Ryzen 7 7700X (8 cores / 16 threads), ~63 GB RAM, default `act_runner` concurrency (1 job at a time).
|
||||||
|
- Unit job finishes in ~4.1 s once Postgres is ready; light CPU and network usage.
|
||||||
|
- E2E job finishes in ~22 s, but a cold run pulls ~179 MB of apt packages plus ~470 MB of Playwright browser payloads (>650 MB download, several GB disk writes) because we do not have a shared cache yet.
|
||||||
|
|
||||||
|
To move forward, could you help with the following?
|
||||||
|
|
||||||
|
1. Confirm whether we can raise the runner concurrency limit or provision an additional runner so parallel jobs do not queue behind one another.
|
||||||
|
2. Let us know if a central cache (Artifactory, Nexus, etc.) is available for Python wheels and Playwright browser bundles, or if we should consider baking the browsers into the runner image instead.
|
||||||
|
3. Share any guidance on preferred caching or proxy solutions for large binary installs on self-hosted runners.
|
||||||
|
|
||||||
|
Once we have clarity, we can finalize the parallel rollout and update the documentation accordingly.
|
||||||
|
|
||||||
|
Thanks,
|
||||||
|
<Your Name>
|
||||||
|
```
|
||||||
152
docs/architecture/07_deployment/07_03_gitea_action_runner.md
Normal file
152
docs/architecture/07_deployment/07_03_gitea_action_runner.md
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
# Gitea Action Runner Setup
|
||||||
|
|
||||||
|
This guide describes how to provision, configure, and maintain self-hosted runners for CalMiner's Gitea-based CI/CD pipelines.
|
||||||
|
|
||||||
|
## 1. Purpose and Scope
|
||||||
|
|
||||||
|
- Explain the role runners play in executing GitHub Actions–compatible workflows inside our private Gitea instance.
|
||||||
|
- Define supported environments (Windows hosts running Docker for Linux containers today, Alpine or other Linux variants as future additions).
|
||||||
|
- Provide repeatable steps so additional runners can be brought online quickly and consistently.
|
||||||
|
|
||||||
|
## 2. Prerequisites
|
||||||
|
|
||||||
|
- **Hardware**: Minimum 8 vCPU, 16 GB RAM, and 50 GB free disk. For Playwright-heavy suites, plan for ≥60 GB free to absorb browser caches.
|
||||||
|
- **Operating system**: Current runner uses Windows 11 Pro (10.0.26100, 64-bit). Linux instructions mirror the same flow; see section 7 for Alpine specifics.
|
||||||
|
- **Container engine**: Docker Desktop (Windows) or Docker Engine (Linux) with pull access to `docker.gitea.com/runner-images` and `postgres:16-alpine`.
|
||||||
|
- **Dependencies**: `curl`, `tar`, PowerShell 7+ (Windows), or standard GNU utilities (Linux) to unpack releases.
|
||||||
|
- **Gitea access**: Repository admin or site admin token with permission to register self-hosted runners (`Settings → Runners → New Runner`).
|
||||||
|
|
||||||
|
### Current Runner Inventory (October 2025)
|
||||||
|
|
||||||
|
- Hostname `DESKTOP-GLB3A15`; ASUS System Product Name chassis with AMD Ryzen 7 7700X (8C/16T) and ~63 GB usable RAM.
|
||||||
|
- Windows 11 Pro 10.0.26100 (64-bit) hosting Docker containers for Ubuntu-based job images.
|
||||||
|
- `act_runner` version `v0.2.13`; no `act_runner.yaml` present, so defaults apply (single concurrency, no custom labels beyond registration).
|
||||||
|
- Registered against `http://192.168.88.30:3000` with labels:
|
||||||
|
- `ubuntu-latest:docker://docker.gitea.com/runner-images:ubuntu-latest`
|
||||||
|
- `ubuntu-24.04:docker://docker.gitea.com/runner-images:ubuntu-24.04`
|
||||||
|
- `ubuntu-22.04:docker://docker.gitea.com/runner-images:ubuntu-22.04`
|
||||||
|
- Runner metadata stored in `.runner`; removing this file forces re-registration and should only be done intentionally.
|
||||||
|
|
||||||
|
## 3. Runner Installation
|
||||||
|
|
||||||
|
### 3.1 Download and Extract
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
$runnerVersion = "v0.2.13"
|
||||||
|
$downloadUrl = "https://gitea.com/gitea/act_runner/releases/download/$runnerVersion/act_runner_${runnerVersion}_windows_amd64.zip"
|
||||||
|
Invoke-WebRequest -Uri $downloadUrl -OutFile act_runner.zip
|
||||||
|
Expand-Archive act_runner.zip -DestinationPath C:\Tools\act-runner -Force
|
||||||
|
```
|
||||||
|
|
||||||
|
For Linux, download the `linux_amd64.tar.gz` artifact and extract with `tar -xzf` into `/opt/act-runner`.
|
||||||
|
|
||||||
|
### 3.2 Configure Working Directory
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
Set-Location C:\Tools\act-runner
|
||||||
|
New-Item -ItemType Directory -Path logs -Force | Out-Null
|
||||||
|
```
|
||||||
|
|
||||||
|
Ensure the directory is writable by the service account that will execute the runner.
|
||||||
|
|
||||||
|
### 3.3 Register With Gitea
|
||||||
|
|
||||||
|
1. In Gitea, navigate to the repository or organization **Settings → Runners → New Runner**.
|
||||||
|
2. Copy the registration token and instance URL.
|
||||||
|
3. Execute the registration wizard:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
.\act_runner.exe register --instance http://192.168.88.30:3000 --token <TOKEN> --labels "ubuntu-latest:docker://docker.gitea.com/runner-images:ubuntu-latest,ubuntu-24.04:docker://docker.gitea.com/runner-images:ubuntu-24.04,ubuntu-22.04:docker://docker.gitea.com/runner-images:ubuntu-22.04"
|
||||||
|
```
|
||||||
|
|
||||||
|
Linux syntax is identical using `./act_runner register`.
|
||||||
|
|
||||||
|
This command populates `.runner` with the runner ID, UUID, and labels.
|
||||||
|
|
||||||
|
## 4. Service Configuration
|
||||||
|
|
||||||
|
### 4.1 Windows Service
|
||||||
|
|
||||||
|
Act Runner provides a built-in service helper:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
.\act_runner.exe install
|
||||||
|
.\act_runner.exe start
|
||||||
|
```
|
||||||
|
|
||||||
|
The service runs under `LocalSystem` by default. Use `.\act_runner.exe install --user <DOMAIN\User> --password <Secret>` if isolation is required.
|
||||||
|
|
||||||
|
### 4.2 Linux systemd Unit
|
||||||
|
|
||||||
|
Create `/etc/systemd/system/act-runner.service`:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[Unit]
|
||||||
|
Description=Gitea Act Runner
|
||||||
|
After=docker.service
|
||||||
|
Requires=docker.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
WorkingDirectory=/opt/act-runner
|
||||||
|
ExecStart=/opt/act-runner/act_runner daemon
|
||||||
|
Restart=always
|
||||||
|
RestartSec=10
|
||||||
|
Environment="HTTP_PROXY=http://apt-cacher:3142" "HTTPS_PROXY=http://apt-cacher:3142"
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
```
|
||||||
|
|
||||||
|
Enable and start:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo systemctl daemon-reload
|
||||||
|
sudo systemctl enable --now act-runner.service
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3 Environment Variables and Proxy Settings
|
||||||
|
|
||||||
|
- Configure `HTTP_PROXY`, `HTTPS_PROXY`, and their lowercase variants to leverage the shared apt cache (`http://apt-cacher:3142`).
|
||||||
|
- Persist Docker registry credentials (for `docker.gitea.com`) in the service user profile using `docker login`; workflows rely on cached authentication for builds.
|
||||||
|
- To expose pip caching once infrastructure is available, set `PIP_INDEX_URL` and `PIP_EXTRA_INDEX_URL` at the service level.
|
||||||
|
|
||||||
|
### 4.4 Logging
|
||||||
|
|
||||||
|
- Windows services write to `%ProgramData%\act-runner\logs`. Redirect or forward to centralized logging if required.
|
||||||
|
- Linux installations can leverage `journalctl -u act-runner` and logrotate rules for `/opt/act-runner/logs`.
|
||||||
|
|
||||||
|
## 5. Network and Security
|
||||||
|
|
||||||
|
- **Outbound**: Allow HTTPS traffic to the Gitea instance, Docker Hub, docker.gitea.com, npm (for Playwright), PyPI, and the apt cache proxy.
|
||||||
|
- **Inbound**: No inbound ports are required; block unsolicited traffic on internet-facing hosts.
|
||||||
|
- **Credentials**: Store deployment SSH keys and registry credentials in Gitea secrets, not on the runner host.
|
||||||
|
- **Least privilege**: Run the service under a dedicated account with access only to Docker and required directories.
|
||||||
|
|
||||||
|
## 6. Maintenance and Upgrades
|
||||||
|
|
||||||
|
- **Version checks**: Monitor `https://gitea.com/gitea/act_runner/releases` and schedule upgrades quarterly or when security fixes drop.
|
||||||
|
- **Upgrade procedure**: Stop the service, replace `act_runner` binary, restart. Re-registration is not required as long as `.runner` remains intact.
|
||||||
|
- **Health checks**: Periodically validate connectivity with `act_runner exec --detect-event -W .gitea/workflows/test.yml` and inspect workflow durations to catch regressions.
|
||||||
|
- **Cleanup**: Purge Docker images and volumes monthly (`docker system prune -af`) to reclaim disk space.
|
||||||
|
- **Troubleshooting**: Use `act_runner diagnose` (if available in newer versions) or review logs for repeated failures; reset by stopping the service, deleting stale job containers (`docker ps -a`), and restarting.
|
||||||
|
|
||||||
|
## 7. Alpine-based Runner Notes
|
||||||
|
|
||||||
|
- Install baseline packages: `apk add docker bash curl coreutils nodejs npm python3 py3-pip libstdc++`.
|
||||||
|
- Playwright requirements: add `apk add chromium nss freetype harfbuzz ca-certificates mesa-gl` or install Playwright browsers via `npx playwright install --with-deps` using the Alpine bundle.
|
||||||
|
- Musl vs glibc: When workflows require glibc (e.g., certain Python wheels), include `apk add gcompat` or base images on `frolvlad/alpine-glibc`.
|
||||||
|
- Systemd alternative: Use `rc-service` or `supervisord` to manage `act_runner daemon` on Alpine since systemd is absent.
|
||||||
|
- Storage: Mount `/var/lib/docker` to persistent storage if running inside a VM, ensuring browser downloads and layer caches survive restarts.
|
||||||
|
|
||||||
|
## 8. Appendix
|
||||||
|
|
||||||
|
- **Troubleshooting checklist**:
|
||||||
|
- Verify Docker daemon is healthy (`docker info`).
|
||||||
|
- Confirm `.runner` file exists and lists expected labels.
|
||||||
|
- Re-run `act_runner register` if the runner no longer appears in Gitea.
|
||||||
|
- Check proxy endpoints are reachable before jobs start downloading dependencies.
|
||||||
|
|
||||||
|
- **Related documentation**:
|
||||||
|
- `docs/architecture/07_deployment/07_01_testing_ci.md` (workflow architecture and CI owner coordination).
|
||||||
|
- `docs/ci-cache-troubleshooting.md` (pip caching status and known issues).
|
||||||
|
- `.gitea/actions/setup-python-env/action.yml` (shared job preparation logic referenced in workflows).
|
||||||
@@ -15,23 +15,68 @@ The CalMiner application is deployed using a multi-tier architecture consisting
|
|||||||
1. **Client Layer**: This layer consists of web browsers that interact with the application through a user interface rendered by Jinja2 templates and enhanced with JavaScript (Chart.js for dashboards).
|
1. **Client Layer**: This layer consists of web browsers that interact with the application through a user interface rendered by Jinja2 templates and enhanced with JavaScript (Chart.js for dashboards).
|
||||||
2. **Web Application Layer**: This layer hosts the FastAPI application, which handles API requests, business logic, and serves HTML templates. It communicates with the database layer for data persistence.
|
2. **Web Application Layer**: This layer hosts the FastAPI application, which handles API requests, business logic, and serves HTML templates. It communicates with the database layer for data persistence.
|
||||||
3. **Database Layer**: This layer consists of a PostgreSQL database that stores all application data, including scenarios, parameters, costs, consumption, production outputs, equipment, maintenance logs, and simulation results.
|
3. **Database Layer**: This layer consists of a PostgreSQL database that stores all application data, including scenarios, parameters, costs, consumption, production outputs, equipment, maintenance logs, and simulation results.
|
||||||
4. **Caching Layer**: This layer uses Redis to cache frequently accessed data and improve application performance.
|
|
||||||
|
```mermaid
|
||||||
|
graph TD
|
||||||
|
A[Client Layer] --> B[Web Application Layer]
|
||||||
|
B --> C[Database Layer]
|
||||||
|
```
|
||||||
|
|
||||||
## Infrastructure Components
|
## Infrastructure Components
|
||||||
|
|
||||||
The infrastructure components for the application include:
|
The infrastructure components for the application include:
|
||||||
|
|
||||||
- **Web Server**: Hosts the FastAPI application and serves API endpoints.
|
|
||||||
- **Database Server**: PostgreSQL database for persisting application data.
|
|
||||||
- **Static File Server**: Serves static assets such as CSS, JavaScript, and image files.
|
|
||||||
- **Reverse Proxy (optional)**: An Nginx or Apache server can be used as a reverse proxy.
|
- **Reverse Proxy (optional)**: An Nginx or Apache server can be used as a reverse proxy.
|
||||||
- **Containerization**: Docker images are generated via the repository `Dockerfile`, using a multi-stage build to keep the final runtime minimal.
|
- **Containerization**: Docker images are generated via the repository `Dockerfile`, using a multi-stage build to keep the final runtime minimal.
|
||||||
- **CI/CD Pipeline**: Automated pipelines (Gitea Actions) run tests, build/push Docker images, and trigger deployments.
|
- **CI/CD Pipeline**: Automated pipelines (Gitea Actions) run tests, build/push Docker images, and trigger deployments.
|
||||||
|
- **Gitea Actions Workflows**: Located under `.gitea/workflows/`, these workflows handle testing, building, pushing, and deploying the application.
|
||||||
|
- **Gitea Action Runners**: Self-hosted runners execute the CI/CD workflows.
|
||||||
|
- **Testing and Continuous Integration**: Automated tests ensure code quality before deployment, also documented in [Testing & CI](07_deployment/07_01_testing_ci.md).
|
||||||
|
- **Docker Infrastructure**: Docker is used to containerize the application for consistent deployment across environments.
|
||||||
|
- **Portainer**: Production deployment environment for managing Docker containers.
|
||||||
|
- **Web Server**: Hosts the FastAPI application and serves API endpoints.
|
||||||
|
- **Database Server**: PostgreSQL database for persisting application data.
|
||||||
|
- **Static File Server**: Serves static assets such as CSS, JavaScript, and image files.
|
||||||
- **Cloud Infrastructure (optional)**: The application can be deployed on cloud platforms.
|
- **Cloud Infrastructure (optional)**: The application can be deployed on cloud platforms.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
graph TD
|
||||||
|
G[Git Repository] --> C[CI/CD Pipeline]
|
||||||
|
C --> GAW[Gitea Action Workflows]
|
||||||
|
GAW --> GAR[Gitea Action Runners]
|
||||||
|
GAR --> T[Testing]
|
||||||
|
GAR --> CI[Continuous Integration]
|
||||||
|
T --> G
|
||||||
|
CI --> G
|
||||||
|
|
||||||
|
W[Web Server] --> DB[Database Server]
|
||||||
|
RP[Reverse Proxy] --> W
|
||||||
|
I((Internet)) <--> RP
|
||||||
|
PO[Containerization] --> W
|
||||||
|
C[CI/CD Pipeline] --> PO
|
||||||
|
W --> S[Static File Server]
|
||||||
|
S --> RP
|
||||||
|
PO --> DB
|
||||||
|
PO --> S
|
||||||
|
```
|
||||||
|
|
||||||
## Environments
|
## Environments
|
||||||
|
|
||||||
The application can be deployed in multiple environments to support development, testing, and production:
|
The application can be deployed in multiple environments to support development, testing, and production.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
graph TD
|
||||||
|
R[Repository] --> DEV[Development Environment]
|
||||||
|
R[Repository] --> TEST[Testing Environment]
|
||||||
|
R[Repository] --> PROD[Production Environment]
|
||||||
|
|
||||||
|
DEV --> W_DEV[Web Server - Dev]
|
||||||
|
DEV --> DB_DEV[Database Server - Dev]
|
||||||
|
TEST --> W_TEST[Web Server - Test]
|
||||||
|
TEST --> DB_TEST[Database Server - Test]
|
||||||
|
PROD --> W_PROD[Web Server - Prod]
|
||||||
|
PROD --> DB_PROD[Database Server - Prod]
|
||||||
|
```
|
||||||
|
|
||||||
### Development Environment
|
### Development Environment
|
||||||
|
|
||||||
@@ -54,23 +99,23 @@ The production environment is set up for serving live traffic and includes:
|
|||||||
|
|
||||||
- Production PostgreSQL instance
|
- Production PostgreSQL instance
|
||||||
- FastAPI server running in production mode
|
- FastAPI server running in production mode
|
||||||
- Load balancer (e.g., Nginx) for distributing incoming requests
|
- Load balancer (Traefik) for distributing incoming requests
|
||||||
- Monitoring and logging tools for tracking application performance
|
- Monitoring and logging tools for tracking application performance
|
||||||
|
|
||||||
## Containerized Deployment Flow
|
## Containerized Deployment Flow
|
||||||
|
|
||||||
The Docker-based deployment path aligns with the solution strategy documented in [04 — Solution Strategy](04_solution_strategy.md) and the CI practices captured in [14 — Testing & CI](14_testing_ci.md).
|
The Docker-based deployment path aligns with the solution strategy documented in [Solution Strategy](04_solution_strategy.md) and the CI practices captured in [Testing & CI](07_deployment/07_01_testing_ci.md).
|
||||||
|
|
||||||
### Image Build
|
### Image Build
|
||||||
|
|
||||||
- The multi-stage `Dockerfile` installs dependencies in a builder layer (including system compilers and Python packages) and copies only the required runtime artifacts to the final image.
|
- The multi-stage `Dockerfile` installs dependencies in a builder layer (including system compilers and Python packages) and copies only the required runtime artifacts to the final image.
|
||||||
- Build arguments are minimal; database configuration is supplied at runtime via granular variables (`DATABASE_DRIVER`, `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME`, optional `DATABASE_SCHEMA`). Secrets and configuration should be passed via environment variables or an orchestrator.
|
- Build arguments are minimal; database configuration is supplied at runtime via granular variables (`DATABASE_DRIVER`, `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME`, optional `DATABASE_SCHEMA`). Secrets and configuration should be passed via environment variables or an orchestrator.
|
||||||
- The resulting image exposes port `8000` and starts `uvicorn main:app` (s. [README.md](../../README.md)).
|
- The resulting image exposes port `8000` and starts `uvicorn main:app` (see main [README.md](../../README.md)).
|
||||||
|
|
||||||
### Runtime Environment
|
### Runtime Environment
|
||||||
|
|
||||||
- For single-node deployments, run the container alongside PostgreSQL/Redis using Docker Compose or an equivalent orchestrator.
|
- For single-node deployments, run the container alongside PostgreSQL/Redis using Docker Compose or an equivalent orchestrator.
|
||||||
- A reverse proxy (e.g., Nginx) terminates TLS and forwards traffic to the container on port `8000`.
|
- A reverse proxy (Traefik) terminates TLS and forwards traffic to the container on port `8000`.
|
||||||
- Migrations must be applied prior to rolling out a new image; automation can hook into the deploy step to run `scripts/run_migrations.py`.
|
- Migrations must be applied prior to rolling out a new image; automation can hook into the deploy step to run `scripts/run_migrations.py`.
|
||||||
|
|
||||||
### CI/CD Integration
|
### CI/CD Integration
|
||||||
@@ -80,7 +125,7 @@ The Docker-based deployment path aligns with the solution strategy documented in
|
|||||||
- `build-and-push.yml` logs into the container registry, rebuilds the Docker image using GitHub Actions cache-backed layers, and pushes `latest` (and additional tags as required).
|
- `build-and-push.yml` logs into the container registry, rebuilds the Docker image using GitHub Actions cache-backed layers, and pushes `latest` (and additional tags as required).
|
||||||
- `deploy.yml` connects to the target host via SSH, pulls the pushed tag, stops any existing container, and launches the new version.
|
- `deploy.yml` connects to the target host via SSH, pulls the pushed tag, stops any existing container, and launches the new version.
|
||||||
- Required secrets: `REGISTRY_URL`, `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
|
- Required secrets: `REGISTRY_URL`, `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
|
||||||
- Extend these workflows when introducing staging/blue-green deployments; keep cross-links with [14 — Testing & CI](14_testing_ci.md) up to date.
|
- Extend these workflows when introducing staging/blue-green deployments; keep cross-links with [Testing & CI](07_deployment/07_01_testing_ci.md) up to date.
|
||||||
|
|
||||||
## Integrations and Future Work (deployment-related)
|
## Integrations and Future Work (deployment-related)
|
||||||
|
|
||||||
|
|||||||
@@ -55,6 +55,7 @@ See [Domain Models](08_concepts/08_01_domain_models.md) document for detailed cl
|
|||||||
- `production_output`: production metrics per scenario.
|
- `production_output`: production metrics per scenario.
|
||||||
- `equipment` and `maintenance`: equipment inventory and maintenance events with dates/costs.
|
- `equipment` and `maintenance`: equipment inventory and maintenance events with dates/costs.
|
||||||
- `simulation_result`: staging table for future Monte Carlo outputs (not yet populated by `run_simulation`).
|
- `simulation_result`: staging table for future Monte Carlo outputs (not yet populated by `run_simulation`).
|
||||||
|
- `application_setting`: centralized key/value store for UI and system configuration, supporting typed values, categories, and editability flags so administrators can manage theme variables and future global options without code changes.
|
||||||
|
|
||||||
Foreign keys enforce referential integrity between domain tables and their scenarios, enabling per-scenario analytics.
|
Foreign keys enforce referential integrity between domain tables and their scenarios, enabling per-scenario analytics.
|
||||||
|
|
||||||
|
|||||||
36
docs/architecture/08_concepts/08_01_security.md
Normal file
36
docs/architecture/08_concepts/08_01_security.md
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
# User Roles and Permissions Model
|
||||||
|
|
||||||
|
This document outlines the proposed user roles and permissions model for the CalMiner application.
|
||||||
|
|
||||||
|
## User Roles
|
||||||
|
|
||||||
|
- **Admin:** Full access to all features, including user management, application settings, and all data.
|
||||||
|
- **Analyst:** Can create, view, edit, and delete scenarios, run simulations, and view reports. Cannot modify application settings or manage users.
|
||||||
|
- **Viewer:** Can view scenarios, simulations, and reports. Cannot create, edit, or delete anything.
|
||||||
|
|
||||||
|
## Permissions (examples)
|
||||||
|
|
||||||
|
- `users:manage`: Admin only.
|
||||||
|
- `settings:manage`: Admin only.
|
||||||
|
- `scenarios:create`: Admin, Analyst.
|
||||||
|
- `scenarios:view`: Admin, Analyst, Viewer.
|
||||||
|
- `scenarios:edit`: Admin, Analyst.
|
||||||
|
- `scenarios:delete`: Admin, Analyst.
|
||||||
|
- `simulations:run`: Admin, Analyst.
|
||||||
|
- `simulations:view`: Admin, Analyst, Viewer.
|
||||||
|
- `reports:view`: Admin, Analyst, Viewer.
|
||||||
|
|
||||||
|
## Authentication System
|
||||||
|
|
||||||
|
The authentication system uses JWT (JSON Web Tokens) for securing API endpoints. Users can register with a username, email, and password. Passwords are hashed using bcrypt. Upon successful login, an access token is issued, which must be included in subsequent requests for protected resources.
|
||||||
|
|
||||||
|
## Key Components
|
||||||
|
|
||||||
|
- **Password Hashing:** `passlib.context.CryptContext` with `bcrypt` scheme.
|
||||||
|
- **Token Creation & Verification:** `jose.jwt` for encoding and decoding JWTs.
|
||||||
|
- **Authentication Flow:**
|
||||||
|
1. User registers via `/users/register`.
|
||||||
|
2. User logs in via `/users/login` to obtain an access token.
|
||||||
|
3. The access token is sent in the `Authorization` header (Bearer token) for protected routes.
|
||||||
|
4. The `get_current_user` dependency verifies the token and retrieves the authenticated user.
|
||||||
|
- **Password Reset:** A placeholder `forgot_password` endpoint is available, and a `reset_password` endpoint allows users to set a new password with a valid token (token generation and email sending are not yet implemented).
|
||||||
@@ -28,6 +28,32 @@ Import macros via:
|
|||||||
- **Tables**: `.table-container` wrappers need overflow handling for narrow viewports; consider `overflow-x: auto` with padding adjustments.
|
- **Tables**: `.table-container` wrappers need overflow handling for narrow viewports; consider `overflow-x: auto` with padding adjustments.
|
||||||
- **Feedback/Empty states**: Messages use default font weight and spacing; a utility class for margin/padding would ensure consistent separation from forms or tables.
|
- **Feedback/Empty states**: Messages use default font weight and spacing; a utility class for margin/padding would ensure consistent separation from forms or tables.
|
||||||
|
|
||||||
|
## CSS Variable Naming Conventions
|
||||||
|
|
||||||
|
The project adheres to a clear and descriptive naming convention for CSS variables, primarily defined in `static/css/main.css`.
|
||||||
|
|
||||||
|
## Naming Structure
|
||||||
|
|
||||||
|
Variables are prefixed based on their category:
|
||||||
|
|
||||||
|
- `--color-`: For all color-related variables (e.g., `--color-primary`, `--color-background`, `--color-text-primary`).
|
||||||
|
- `--space-`: For spacing and layout-related variables (e.g., `--space-sm`, `--space-md`, `--space-lg`).
|
||||||
|
- `--font-size-`: For font size variables (e.g., `--font-size-base`, `--font-size-lg`).
|
||||||
|
- Other specific prefixes for components or properties (e.g., `--panel-radius`, `--table-radius`).
|
||||||
|
|
||||||
|
## Descriptive Names
|
||||||
|
|
||||||
|
Color names are chosen to be semantically meaningful rather than literal color values, allowing for easier theme changes. For example:
|
||||||
|
|
||||||
|
- `--color-primary`: Represents the main brand color.
|
||||||
|
- `--color-accent`: Represents an accent color used for highlights.
|
||||||
|
- `--color-text-primary`: The main text color.
|
||||||
|
- `--color-text-muted`: A lighter text color for less emphasis.
|
||||||
|
- `--color-surface`: The background color for UI elements like cards or panels.
|
||||||
|
- `--color-background`: The overall page background color.
|
||||||
|
|
||||||
|
This approach ensures that the CSS variables are intuitive, maintainable, and easily adaptable for future theme customizations.
|
||||||
|
|
||||||
## Per-page data & actions
|
## Per-page data & actions
|
||||||
|
|
||||||
Short reference of per-page APIs and primary actions used by templates and scripts.
|
Short reference of per-page APIs and primary actions used by templates and scripts.
|
||||||
@@ -76,6 +102,21 @@ Short reference of per-page APIs and primary actions used by templates and scrip
|
|||||||
- Data: `POST /api/reporting/summary` (accepts arrays of `{ "result": float }` objects)
|
- Data: `POST /api/reporting/summary` (accepts arrays of `{ "result": float }` objects)
|
||||||
- Actions: Trigger summary refreshes and export/download actions.
|
- Actions: Trigger summary refreshes and export/download actions.
|
||||||
|
|
||||||
|
## Navigation Structure
|
||||||
|
|
||||||
|
The application uses a sidebar navigation menu organized into the following top-level categories:
|
||||||
|
|
||||||
|
- **Dashboard**: Main overview page.
|
||||||
|
- **Overview**: Sub-menu for core scenario inputs.
|
||||||
|
- Parameters: Process parameters configuration.
|
||||||
|
- Costs: Capital and operating costs.
|
||||||
|
- Consumption: Resource consumption tracking.
|
||||||
|
- Production: Production output settings.
|
||||||
|
- Equipment: Equipment inventory (with Maintenance sub-item).
|
||||||
|
- **Simulations**: Monte Carlo simulation runs.
|
||||||
|
- **Analytics**: Reporting and analytics.
|
||||||
|
- **Settings**: Administrative settings (with Themes and Currency Management sub-items).
|
||||||
|
|
||||||
## UI Template Audit (2025-10-20)
|
## UI Template Audit (2025-10-20)
|
||||||
|
|
||||||
- Existing HTML templates: `ScenarioForm.html`, `ParameterInput.html`, and `Dashboard.html` (reporting summary view).
|
- Existing HTML templates: `ScenarioForm.html`, `ParameterInput.html`, and `Dashboard.html` (reporting summary view).
|
||||||
|
|||||||
@@ -1,118 +0,0 @@
|
|||||||
# 14 Testing, CI and Quality Assurance
|
|
||||||
|
|
||||||
This chapter centralizes the project's testing strategy, CI configuration, and quality targets.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
CalMiner uses a combination of unit, integration, and end-to-end tests to ensure quality.
|
|
||||||
|
|
||||||
### Frameworks
|
|
||||||
|
|
||||||
- Backend: pytest for unit and integration tests.
|
|
||||||
- Frontend: pytest with Playwright for E2E tests.
|
|
||||||
- Database: pytest fixtures with psycopg2 for DB tests.
|
|
||||||
|
|
||||||
### Test Types
|
|
||||||
|
|
||||||
- Unit Tests: Test individual functions/modules.
|
|
||||||
- Integration Tests: Test API endpoints and DB interactions.
|
|
||||||
- E2E Tests: Playwright for full user flows.
|
|
||||||
|
|
||||||
### CI/CD
|
|
||||||
|
|
||||||
- Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`.
|
|
||||||
- `test.yml` runs on every push, provisions a temporary Postgres 16 service, waits for readiness, executes the setup script in dry-run and live modes, installs Playwright browsers, and finally runs the full pytest suite.
|
|
||||||
- `build-and-push.yml` builds the Docker image with `docker/build-push-action@v2`, reusing GitHub Actions cache-backed layers, and pushes to the Gitea registry.
|
|
||||||
- `deploy.yml` connects to the target host (via `appleboy/ssh-action`) to pull the freshly pushed image and restart the container.
|
|
||||||
- Mandatory secrets: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
|
|
||||||
- Run tests on pull requests to shared branches; enforce coverage target ≥80% (pytest-cov).
|
|
||||||
|
|
||||||
### Running Tests
|
|
||||||
|
|
||||||
- Unit: `pytest tests/unit/`
|
|
||||||
- E2E: `pytest tests/e2e/`
|
|
||||||
- All: `pytest`
|
|
||||||
|
|
||||||
### Test Directory Structure
|
|
||||||
|
|
||||||
Organize tests under the `tests/` directory mirroring the application structure:
|
|
||||||
|
|
||||||
````text
|
|
||||||
tests/
|
|
||||||
unit/
|
|
||||||
test_<module>.py
|
|
||||||
e2e/
|
|
||||||
test_<flow>.py
|
|
||||||
fixtures/
|
|
||||||
conftest.py
|
|
||||||
```python
|
|
||||||
|
|
||||||
### Fixtures and Test Data
|
|
||||||
|
|
||||||
- Define reusable fixtures in `tests/fixtures/conftest.py`.
|
|
||||||
- Use temporary in-memory databases or isolated schemas for DB tests.
|
|
||||||
- Load sample data via fixtures for consistent test environments.
|
|
||||||
- Leverage the `seeded_ui_data` fixture in `tests/unit/conftest.py` to populate scenarios with related cost, maintenance, and simulation records for deterministic UI route checks.
|
|
||||||
|
|
||||||
### E2E (Playwright) Tests
|
|
||||||
|
|
||||||
The E2E test suite, located in `tests/e2e/`, uses Playwright to simulate user interactions in a live browser environment. These tests are designed to catch issues in the UI, frontend-backend integration, and overall application flow.
|
|
||||||
|
|
||||||
#### Fixtures
|
|
||||||
|
|
||||||
- `live_server`: A session-scoped fixture that launches the FastAPI application in a separate process, making it accessible to the browser.
|
|
||||||
- `playwright_instance`, `browser`, `page`: Standard `pytest-playwright` fixtures for managing the Playwright instance, browser, and individual pages.
|
|
||||||
|
|
||||||
#### Smoke Tests
|
|
||||||
|
|
||||||
- UI Page Loading: `test_smoke.py` contains a parameterized test that systematically navigates to all UI routes to ensure they load without errors, have the correct title, and display a primary heading.
|
|
||||||
- Form Submissions: Each major form in the application has a corresponding test file (e.g., `test_scenarios.py`, `test_costs.py`) that verifies: page loads, create item by filling the form, success message, and UI updates.
|
|
||||||
|
|
||||||
### Running E2E Tests
|
|
||||||
|
|
||||||
To run the Playwright tests:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pytest tests/e2e/
|
|
||||||
````
|
|
||||||
|
|
||||||
To run headed mode:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pytest tests/e2e/ --headed
|
|
||||||
```
|
|
||||||
|
|
||||||
### Mocking and Dependency Injection
|
|
||||||
|
|
||||||
- Use `unittest.mock` to mock external dependencies.
|
|
||||||
- Inject dependencies via function parameters or FastAPI's dependency overrides in tests.
|
|
||||||
|
|
||||||
### Code Coverage
|
|
||||||
|
|
||||||
- Install `pytest-cov` to generate coverage reports.
|
|
||||||
- Run with coverage: `pytest --cov --cov-report=term` (use `--cov-report=html` when visualizing hotspots).
|
|
||||||
- Target 95%+ overall coverage. Focus on historically low modules: `services/simulation.py`, `services/reporting.py`, `middleware/validation.py`, and `routes/ui.py`.
|
|
||||||
- Latest snapshot (2025-10-21): `pytest --cov=. --cov-report=term-missing` returns **91%** overall coverage.
|
|
||||||
|
|
||||||
### CI Integration
|
|
||||||
|
|
||||||
`test.yml` encapsulates the steps below:
|
|
||||||
|
|
||||||
- Check out the repository and set up Python 3.10.
|
|
||||||
- Configure the runner's apt proxy (if available), install project dependencies (requirements + test extras), and download Playwright browsers.
|
|
||||||
- Run `pytest` (extend with `--cov` flags when enforcing coverage).
|
|
||||||
|
|
||||||
> The pip cache step is temporarily disabled in `test.yml` until the self-hosted cache service is exposed (see `docs/ci-cache-troubleshooting.md`).
|
|
||||||
|
|
||||||
`build-and-push.yml` adds:
|
|
||||||
|
|
||||||
- Registry login using repository secrets.
|
|
||||||
- Docker image build/push with GHA cache storage (`cache-from/cache-to` set to `type=gha`).
|
|
||||||
|
|
||||||
`deploy.yml` handles:
|
|
||||||
|
|
||||||
- SSH into the deployment host.
|
|
||||||
- Pull the tagged image from the registry.
|
|
||||||
- Stop, remove, and relaunch the `calminer` container exposing port 8000.
|
|
||||||
|
|
||||||
When adding new workflows, mirror this structure to ensure secrets, caching, and deployment steps remain aligned with the production environment.
|
|
||||||
@@ -16,11 +16,11 @@ This folder mirrors the arc42 chapter structure (adapted to Markdown).
|
|||||||
- [05 Building Block View](05_building_block_view.md)
|
- [05 Building Block View](05_building_block_view.md)
|
||||||
- [06 Runtime View](06_runtime_view.md)
|
- [06 Runtime View](06_runtime_view.md)
|
||||||
- [07 Deployment View](07_deployment_view.md)
|
- [07 Deployment View](07_deployment_view.md)
|
||||||
|
- [Testing & CI](07_deployment/07_01_testing_ci.md)
|
||||||
- [08 Concepts](08_concepts.md)
|
- [08 Concepts](08_concepts.md)
|
||||||
- [09 Architecture Decisions](09_architecture_decisions.md)
|
- [09 Architecture Decisions](09_architecture_decisions.md)
|
||||||
- [10 Quality Requirements](10_quality_requirements.md)
|
- [10 Quality Requirements](10_quality_requirements.md)
|
||||||
- [11 Technical Risks](11_technical_risks.md)
|
- [11 Technical Risks](11_technical_risks.md)
|
||||||
- [12 Glossary](12_glossary.md)
|
- [12 Glossary](12_glossary.md)
|
||||||
- [13 UI and Style](13_ui_and_style.md)
|
- [13 UI and Style](13_ui_and_style.md)
|
||||||
- [14 Testing & CI](14_testing_ci.md)
|
|
||||||
- [15 Development Setup](15_development_setup.md)
|
- [15 Development Setup](15_development_setup.md)
|
||||||
|
|||||||
@@ -52,6 +52,15 @@ If you maintain a Postgres or Redis dependency locally, consider authoring a `do
|
|||||||
- **API base URL**: `http://localhost:8000/api`
|
- **API base URL**: `http://localhost:8000/api`
|
||||||
- Key routes include creating scenarios, parameters, costs, consumption, production, equipment, maintenance, and reporting summaries. See the `routes/` directory for full details.
|
- Key routes include creating scenarios, parameters, costs, consumption, production, equipment, maintenance, and reporting summaries. See the `routes/` directory for full details.
|
||||||
|
|
||||||
|
### Theme configuration
|
||||||
|
|
||||||
|
- Open `/ui/settings` to access the Settings dashboard. The **Theme Colors** form lists every CSS variable persisted in the `application_setting` table. Updates apply immediately across the UI once saved.
|
||||||
|
- Use the accompanying API endpoints for automation or integration tests:
|
||||||
|
- `GET /api/settings/css` returns the active variables, defaults, and metadata describing any environment overrides.
|
||||||
|
- `PUT /api/settings/css` accepts a payload such as `{"variables": {"--color-primary": "#112233"}}` and persists the change unless an environment override is in place.
|
||||||
|
- Environment variables prefixed with `CALMINER_THEME_` win over database values. For example, setting `CALMINER_THEME_COLOR_PRIMARY="#112233"` renders the corresponding input read-only and surfaces the override in the Environment Overrides table.
|
||||||
|
- Acceptable values include hex (`#rrggbb` or `#rrggbbaa`), `rgb()/rgba()`, and `hsl()/hsla()` expressions with the expected number of components. Invalid inputs trigger a validation error and the API responds with HTTP 422.
|
||||||
|
|
||||||
## Dashboard Preview
|
## Dashboard Preview
|
||||||
|
|
||||||
1. Start the FastAPI server and navigate to `/`.
|
1. Start the FastAPI server and navigate to `/`.
|
||||||
@@ -70,7 +79,7 @@ E2E tests use Playwright and a session-scoped `live_server` fixture that starts
|
|||||||
|
|
||||||
## Migrations & Baseline
|
## Migrations & Baseline
|
||||||
|
|
||||||
A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.
|
A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, provisions the `application_setting` store for configurable UI/system options, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.
|
||||||
|
|
||||||
Configure granular database settings in your PowerShell session before running migrations:
|
Configure granular database settings in your PowerShell session before running migrations:
|
||||||
|
|
||||||
@@ -88,6 +97,8 @@ python scripts/setup_database.py --run-migrations --seed-data
|
|||||||
|
|
||||||
The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.
|
The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.
|
||||||
|
|
||||||
|
> ℹ️ When `--seed-data` is supplied without `--run-migrations`, the bootstrap script automatically applies any pending SQL migrations first so the `application_setting` table (and future settings-backed features) are present before seeding.
|
||||||
|
|
||||||
> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
|
> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
|
||||||
|
|
||||||
## Database bootstrap workflow
|
## Database bootstrap workflow
|
||||||
@@ -157,8 +168,6 @@ docker compose -f docker-compose.postgres.yml down
|
|||||||
docker volume rm calminer_postgres_local_postgres_data # optional cleanup
|
docker volume rm calminer_postgres_local_postgres_data # optional cleanup
|
||||||
```
|
```
|
||||||
|
|
||||||
Document successful runs (or issues encountered) in `.github/instructions/DONE.TODO.md` for future reference.
|
|
||||||
|
|
||||||
### Seeding reference data
|
### Seeding reference data
|
||||||
|
|
||||||
`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:
|
`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:
|
||||||
@@ -190,18 +199,18 @@ After a failure and rollback, rerun the full setup once the environment issues a
|
|||||||
|
|
||||||
The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container and runs the setup script twice: once with `--dry-run` to validate the plan and again without it to apply migrations and seeds. No external secrets are required; the workflow sets the following environment variables for both invocations and for pytest:
|
The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container and runs the setup script twice: once with `--dry-run` to validate the plan and again without it to apply migrations and seeds. No external secrets are required; the workflow sets the following environment variables for both invocations and for pytest:
|
||||||
|
|
||||||
| Variable | Value | Purpose |
|
| Variable | Value | Purpose |
|
||||||
| --- | --- | --- |
|
| ----------------------------- | ------------- | ------------------------------------------------- |
|
||||||
| `DATABASE_DRIVER` | `postgresql` | Signals the driver to the setup script |
|
| `DATABASE_DRIVER` | `postgresql` | Signals the driver to the setup script |
|
||||||
| `DATABASE_HOST` | `postgres` | Hostname of the Postgres job service container |
|
| `DATABASE_HOST` | `postgres` | Hostname of the Postgres job service container |
|
||||||
| `DATABASE_PORT` | `5432` | Default service port |
|
| `DATABASE_PORT` | `5432` | Default service port |
|
||||||
| `DATABASE_NAME` | `calminer_ci` | Target database created by the workflow |
|
| `DATABASE_NAME` | `calminer_ci` | Target database created by the workflow |
|
||||||
| `DATABASE_USER` | `calminer` | Application role used during tests |
|
| `DATABASE_USER` | `calminer` | Application role used during tests |
|
||||||
| `DATABASE_PASSWORD` | `secret` | Password for both admin and app role |
|
| `DATABASE_PASSWORD` | `secret` | Password for both admin and app role |
|
||||||
| `DATABASE_SCHEMA` | `public` | Default schema for the tests |
|
| `DATABASE_SCHEMA` | `public` | Default schema for the tests |
|
||||||
| `DATABASE_SUPERUSER` | `calminer` | Setup script uses the same role for admin actions |
|
| `DATABASE_SUPERUSER` | `calminer` | Setup script uses the same role for admin actions |
|
||||||
| `DATABASE_SUPERUSER_PASSWORD` | `secret` | Matches the Postgres service password |
|
| `DATABASE_SUPERUSER_PASSWORD` | `secret` | Matches the Postgres service password |
|
||||||
| `DATABASE_SUPERUSER_DB` | `calminer_ci` | Database to connect to for admin operations |
|
| `DATABASE_SUPERUSER_DB` | `calminer_ci` | Database to connect to for admin operations |
|
||||||
|
|
||||||
The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally.
|
The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally.
|
||||||
|
|
||||||
@@ -217,8 +226,6 @@ Recommended execution order:
|
|||||||
2. Execute the live run with the same flags minus `--dry-run` to provision the database, role grants, migrations, and seed data. Save the log as `reports/setup_staging_apply.log`.
|
2. Execute the live run with the same flags minus `--dry-run` to provision the database, role grants, migrations, and seed data. Save the log as `reports/setup_staging_apply.log`.
|
||||||
3. Repeat the dry run to verify idempotency and record the result (for example `reports/setup_staging_post_apply.log`).
|
3. Repeat the dry run to verify idempotency and record the result (for example `reports/setup_staging_post_apply.log`).
|
||||||
|
|
||||||
Record any issues in `.github/instructions/TODO.md` or `.github/instructions/DONE.TODO.md` as appropriate so the team can track follow-up actions.
|
|
||||||
|
|
||||||
## Database Objects
|
## Database Objects
|
||||||
|
|
||||||
The database contains tables such as `capex`, `opex`, `chemical_consumption`, `fuel_consumption`, `water_consumption`, `scrap_consumption`, `production_output`, `equipment_operation`, `ore_batch`, `exchange_rate`, and `simulation_result`.
|
The database contains tables such as `capex`, `opex`, `chemical_consumption`, `fuel_consumption`, `water_consumption`, `scrap_consumption`, `production_output`, `equipment_operation`, `ore_batch`, `exchange_rate`, and `simulation_result`.
|
||||||
@@ -234,7 +241,7 @@ The database contains tables such as `capex`, `opex`, `chemical_consumption`, `f
|
|||||||
## Where to look next
|
## Where to look next
|
||||||
|
|
||||||
- Architecture overview & chapters: [architecture](architecture/README.md) (per-chapter files under `docs/architecture/`)
|
- Architecture overview & chapters: [architecture](architecture/README.md) (per-chapter files under `docs/architecture/`)
|
||||||
- [Testing & CI](architecture/14_testing_ci.md)
|
- [Testing & CI](architecture/07_deployment/07_01_testing_ci.md)
|
||||||
- [Development setup](architecture/15_development_setup.md)
|
- [Development setup](architecture/15_development_setup.md)
|
||||||
- Implementation plan & roadmap: [Solution strategy](architecture/04_solution_strategy.md)
|
- Implementation plan & roadmap: [Solution strategy](architecture/04_solution_strategy.md)
|
||||||
- Routes: [routes](../routes/)
|
- Routes: [routes](../routes/)
|
||||||
|
|||||||
@@ -16,18 +16,18 @@ This guide outlines how to provision and validate the CalMiner staging database
|
|||||||
|
|
||||||
Populate the following environment variables before invoking the setup script. Store them in a secure location such as `config/setup_staging.env` (excluded from source control) and load them with `dotenv` or your shell profile.
|
Populate the following environment variables before invoking the setup script. Store them in a secure location such as `config/setup_staging.env` (excluded from source control) and load them with `dotenv` or your shell profile.
|
||||||
|
|
||||||
| Variable | Description |
|
| Variable | Description |
|
||||||
| --- | --- |
|
| ----------------------------- | ----------------------------------------------------------------------------------------- |
|
||||||
| `DATABASE_HOST` | Staging PostgreSQL hostname or IP (for example `staging-db.internal`). |
|
| `DATABASE_HOST` | Staging PostgreSQL hostname or IP (for example `staging-db.internal`). |
|
||||||
| `DATABASE_PORT` | Port exposed by the staging PostgreSQL service (default `5432`). |
|
| `DATABASE_PORT` | Port exposed by the staging PostgreSQL service (default `5432`). |
|
||||||
| `DATABASE_NAME` | CalMiner staging database name (for example `calminer_staging`). |
|
| `DATABASE_NAME` | CalMiner staging database name (for example `calminer_staging`). |
|
||||||
| `DATABASE_USER` | Application role used by the FastAPI app (for example `calminer_app`). |
|
| `DATABASE_USER` | Application role used by the FastAPI app (for example `calminer_app`). |
|
||||||
| `DATABASE_PASSWORD` | Password for the application role. |
|
| `DATABASE_PASSWORD` | Password for the application role. |
|
||||||
| `DATABASE_SCHEMA` | Optional non-public schema; omit or set to `public` otherwise. |
|
| `DATABASE_SCHEMA` | Optional non-public schema; omit or set to `public` otherwise. |
|
||||||
| `DATABASE_SUPERUSER` | Administrative role with rights to create roles/databases (for example `calminer_admin`). |
|
| `DATABASE_SUPERUSER` | Administrative role with rights to create roles/databases (for example `calminer_admin`). |
|
||||||
| `DATABASE_SUPERUSER_PASSWORD` | Password for the administrative role. |
|
| `DATABASE_SUPERUSER_PASSWORD` | Password for the administrative role. |
|
||||||
| `DATABASE_SUPERUSER_DB` | Database to connect to for admin tasks (default `postgres`). |
|
| `DATABASE_SUPERUSER_DB` | Database to connect to for admin tasks (default `postgres`). |
|
||||||
| `DATABASE_ADMIN_URL` | Optional DSN that overrides the granular admin settings above. |
|
| `DATABASE_ADMIN_URL` | Optional DSN that overrides the granular admin settings above. |
|
||||||
|
|
||||||
You may also set `DATABASE_URL` for application runtime convenience, but the setup script only requires the values listed in the table.
|
You may also set `DATABASE_URL` for application runtime convenience, but the setup script only requires the values listed in the table.
|
||||||
|
|
||||||
@@ -98,4 +98,3 @@ Run the setup script in three phases to validate idempotency and capture diagnos
|
|||||||
## Next Steps
|
## Next Steps
|
||||||
|
|
||||||
- Keep this document updated as staging infrastructure evolves (for example, when migrating to managed services or rotating credentials).
|
- Keep this document updated as staging infrastructure evolves (for example, when migrating to managed services or rotating credentials).
|
||||||
- Once staging validation is complete, summarize the outcome in `.github/instructions/DONE.TODO.md` and cross-link the relevant log files.
|
|
||||||
|
|||||||
5
main.py
5
main.py
@@ -16,6 +16,8 @@ from routes.reporting import router as reporting_router
|
|||||||
from routes.currencies import router as currencies_router
|
from routes.currencies import router as currencies_router
|
||||||
from routes.simulations import router as simulations_router
|
from routes.simulations import router as simulations_router
|
||||||
from routes.maintenance import router as maintenance_router
|
from routes.maintenance import router as maintenance_router
|
||||||
|
from routes.settings import router as settings_router
|
||||||
|
from routes.users import router as users_router
|
||||||
|
|
||||||
# Initialize database schema
|
# Initialize database schema
|
||||||
Base.metadata.create_all(bind=engine)
|
Base.metadata.create_all(bind=engine)
|
||||||
@@ -29,6 +31,7 @@ async def json_validation(
|
|||||||
) -> Response:
|
) -> Response:
|
||||||
return await validate_json(request, call_next)
|
return await validate_json(request, call_next)
|
||||||
|
|
||||||
|
|
||||||
app.mount("/static", StaticFiles(directory="static"), name="static")
|
app.mount("/static", StaticFiles(directory="static"), name="static")
|
||||||
|
|
||||||
# Include API routers
|
# Include API routers
|
||||||
@@ -43,4 +46,6 @@ app.include_router(equipment_router)
|
|||||||
app.include_router(maintenance_router)
|
app.include_router(maintenance_router)
|
||||||
app.include_router(reporting_router)
|
app.include_router(reporting_router)
|
||||||
app.include_router(currencies_router)
|
app.include_router(currencies_router)
|
||||||
|
app.include_router(settings_router)
|
||||||
app.include_router(ui_router)
|
app.include_router(ui_router)
|
||||||
|
app.include_router(users_router)
|
||||||
|
|||||||
@@ -4,7 +4,10 @@ from fastapi import HTTPException, Request, Response
|
|||||||
|
|
||||||
MiddlewareCallNext = Callable[[Request], Awaitable[Response]]
|
MiddlewareCallNext = Callable[[Request], Awaitable[Response]]
|
||||||
|
|
||||||
async def validate_json(request: Request, call_next: MiddlewareCallNext) -> Response:
|
|
||||||
|
async def validate_json(
|
||||||
|
request: Request, call_next: MiddlewareCallNext
|
||||||
|
) -> Response:
|
||||||
# Only validate JSON for requests with a body
|
# Only validate JSON for requests with a body
|
||||||
if request.method in ("POST", "PUT", "PATCH"):
|
if request.method in ("POST", "PUT", "PATCH"):
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -1,5 +1,10 @@
|
|||||||
"""
|
"""
|
||||||
models package initializer. Import the currency model so it's registered
|
models package initializer. Import key models so they're registered
|
||||||
with the shared Base.metadata when the package is imported by tests.
|
with the shared Base.metadata when the package is imported by tests.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from . import application_setting # noqa: F401
|
||||||
from . import currency # noqa: F401
|
from . import currency # noqa: F401
|
||||||
|
from . import role # noqa: F401
|
||||||
|
from . import user # noqa: F401
|
||||||
|
from . import theme_setting # noqa: F401
|
||||||
|
|||||||
38
models/application_setting.py
Normal file
38
models/application_setting.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""SQLAlchemy model for persisted key/value application settings.

Each row stores one setting as a text ``value`` plus a ``value_type``
discriminator so callers can coerce the string back to the intended
Python type when reading it.
"""

from datetime import datetime
from typing import Optional

from sqlalchemy import Boolean, DateTime, String, Text
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.sql import func

from config.database import Base


class ApplicationSetting(Base):
    """A single application configuration entry keyed by a unique name."""

    __tablename__ = "application_setting"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    # Unique lookup key for the setting (e.g. a dotted name); max 128 chars.
    key: Mapped[str] = mapped_column(String(128), unique=True, nullable=False)
    # The raw value is always stored as text; value_type says how to parse it.
    value: Mapped[str] = mapped_column(Text, nullable=False)
    value_type: Mapped[str] = mapped_column(
        String(32), nullable=False, default="string"
    )
    # Grouping bucket for the settings UI; defaults to "general".
    category: Mapped[str] = mapped_column(
        String(32), nullable=False, default="general"
    )
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # False marks system-managed settings that the UI should not allow editing.
    is_editable: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    # server_default covers the initial insert; onupdate refreshes the
    # timestamp whenever the row is updated through the ORM.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )

    def __repr__(self) -> str:
        return f"<ApplicationSetting key={self.key} category={self.category}>"
|
||||||
@@ -29,8 +29,9 @@ class Capex(Base):
|
|||||||
@currency_code.setter
|
@currency_code.setter
|
||||||
def currency_code(self, value: str) -> None:
|
def currency_code(self, value: str) -> None:
|
||||||
# store pending code so application code or migrations can pick it up
|
# store pending code so application code or migrations can pick it up
|
||||||
setattr(self, "_currency_code_pending",
|
setattr(
|
||||||
(value or "USD").strip().upper())
|
self, "_currency_code_pending", (value or "USD").strip().upper()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# SQLAlchemy event handlers to ensure currency_id is set before insert/update
|
# SQLAlchemy event handlers to ensure currency_id is set before insert/update
|
||||||
@@ -42,22 +43,27 @@ def _resolve_currency(mapper, connection, target):
|
|||||||
return
|
return
|
||||||
code = getattr(target, "_currency_code_pending", None) or "USD"
|
code = getattr(target, "_currency_code_pending", None) or "USD"
|
||||||
# Try to find existing currency id
|
# Try to find existing currency id
|
||||||
row = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
row = connection.execute(
|
||||||
"code": code}).fetchone()
|
text("SELECT id FROM currency WHERE code = :code"), {"code": code}
|
||||||
|
).fetchone()
|
||||||
if row:
|
if row:
|
||||||
cid = row[0]
|
cid = row[0]
|
||||||
else:
|
else:
|
||||||
# Insert new currency and attempt to get lastrowid
|
# Insert new currency and attempt to get lastrowid
|
||||||
res = connection.execute(
|
res = connection.execute(
|
||||||
text("INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"),
|
text(
|
||||||
|
"INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"
|
||||||
|
),
|
||||||
{"code": code, "name": code, "symbol": None, "active": True},
|
{"code": code, "name": code, "symbol": None, "active": True},
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
cid = res.lastrowid
|
cid = res.lastrowid
|
||||||
except Exception:
|
except Exception:
|
||||||
# fallback: select after insert
|
# fallback: select after insert
|
||||||
cid = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
cid = connection.execute(
|
||||||
"code": code}).scalar()
|
text("SELECT id FROM currency WHERE code = :code"),
|
||||||
|
{"code": code},
|
||||||
|
).scalar()
|
||||||
target.currency_id = cid
|
target.currency_id = cid
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,11 @@ class Currency(Base):
|
|||||||
|
|
||||||
# reverse relationships (optional)
|
# reverse relationships (optional)
|
||||||
capex_items = relationship(
|
capex_items = relationship(
|
||||||
"Capex", back_populates="currency", lazy="select")
|
"Capex", back_populates="currency", lazy="select"
|
||||||
|
)
|
||||||
opex_items = relationship("Opex", back_populates="currency", lazy="select")
|
opex_items = relationship("Opex", back_populates="currency", lazy="select")
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return f"<Currency code={self.code} name={self.name} symbol={self.symbol}>"
|
return (
|
||||||
|
f"<Currency code={self.code} name={self.name} symbol={self.symbol}>"
|
||||||
|
)
|
||||||
|
|||||||
@@ -28,28 +28,34 @@ class Opex(Base):
|
|||||||
|
|
||||||
@currency_code.setter
|
@currency_code.setter
|
||||||
def currency_code(self, value: str) -> None:
|
def currency_code(self, value: str) -> None:
|
||||||
setattr(self, "_currency_code_pending",
|
setattr(
|
||||||
(value or "USD").strip().upper())
|
self, "_currency_code_pending", (value or "USD").strip().upper()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _resolve_currency_opex(mapper, connection, target):
|
def _resolve_currency_opex(mapper, connection, target):
|
||||||
if getattr(target, "currency_id", None):
|
if getattr(target, "currency_id", None):
|
||||||
return
|
return
|
||||||
code = getattr(target, "_currency_code_pending", None) or "USD"
|
code = getattr(target, "_currency_code_pending", None) or "USD"
|
||||||
row = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
row = connection.execute(
|
||||||
"code": code}).fetchone()
|
text("SELECT id FROM currency WHERE code = :code"), {"code": code}
|
||||||
|
).fetchone()
|
||||||
if row:
|
if row:
|
||||||
cid = row[0]
|
cid = row[0]
|
||||||
else:
|
else:
|
||||||
res = connection.execute(
|
res = connection.execute(
|
||||||
text("INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"),
|
text(
|
||||||
|
"INSERT INTO currency (code, name, symbol, is_active) VALUES (:code, :name, :symbol, :active)"
|
||||||
|
),
|
||||||
{"code": code, "name": code, "symbol": None, "active": True},
|
{"code": code, "name": code, "symbol": None, "active": True},
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
cid = res.lastrowid
|
cid = res.lastrowid
|
||||||
except Exception:
|
except Exception:
|
||||||
cid = connection.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
cid = connection.execute(
|
||||||
"code": code}).scalar()
|
text("SELECT id FROM currency WHERE code = :code"),
|
||||||
|
{"code": code},
|
||||||
|
).scalar()
|
||||||
target.currency_id = cid
|
target.currency_id = cid
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -10,14 +10,17 @@ class Parameter(Base):
|
|||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
id: Mapped[int] = mapped_column(primary_key=True, index=True)
|
||||||
scenario_id: Mapped[int] = mapped_column(
|
scenario_id: Mapped[int] = mapped_column(
|
||||||
ForeignKey("scenario.id"), nullable=False)
|
ForeignKey("scenario.id"), nullable=False
|
||||||
|
)
|
||||||
name: Mapped[str] = mapped_column(nullable=False)
|
name: Mapped[str] = mapped_column(nullable=False)
|
||||||
value: Mapped[float] = mapped_column(nullable=False)
|
value: Mapped[float] = mapped_column(nullable=False)
|
||||||
distribution_id: Mapped[Optional[int]] = mapped_column(
|
distribution_id: Mapped[Optional[int]] = mapped_column(
|
||||||
ForeignKey("distribution.id"), nullable=True)
|
ForeignKey("distribution.id"), nullable=True
|
||||||
|
)
|
||||||
distribution_type: Mapped[Optional[str]] = mapped_column(nullable=True)
|
distribution_type: Mapped[Optional[str]] = mapped_column(nullable=True)
|
||||||
distribution_parameters: Mapped[Optional[Dict[str, Any]]] = mapped_column(
|
distribution_parameters: Mapped[Optional[Dict[str, Any]]] = mapped_column(
|
||||||
JSON, nullable=True)
|
JSON, nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
scenario = relationship("Scenario", back_populates="parameters")
|
scenario = relationship("Scenario", back_populates="parameters")
|
||||||
distribution = relationship("Distribution")
|
distribution = relationship("Distribution")
|
||||||
|
|||||||
@@ -14,7 +14,8 @@ class ProductionOutput(Base):
|
|||||||
unit_symbol = Column(String(16), nullable=True)
|
unit_symbol = Column(String(16), nullable=True)
|
||||||
|
|
||||||
scenario = relationship(
|
scenario = relationship(
|
||||||
"Scenario", back_populates="production_output_items")
|
"Scenario", back_populates="production_output_items"
|
||||||
|
)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return (
|
return (
|
||||||
|
|||||||
13
models/role.py
Normal file
13
models/role.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
"""SQLAlchemy model for authorization roles."""

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship

from config.database import Base


class Role(Base):
    """Authorization role; each user references exactly one role."""

    __tablename__ = "roles"

    id = Column(Integer, primary_key=True, index=True)
    # Role names are unique identifiers (e.g. "admin").
    name = Column(String, unique=True, index=True)

    # One-to-many back-reference populated from User.role.
    users = relationship("User", back_populates="role")
|
||||||
@@ -20,19 +20,16 @@ class Scenario(Base):
|
|||||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
||||||
parameters = relationship("Parameter", back_populates="scenario")
|
parameters = relationship("Parameter", back_populates="scenario")
|
||||||
simulation_results = relationship(
|
simulation_results = relationship(
|
||||||
SimulationResult, back_populates="scenario")
|
SimulationResult, back_populates="scenario"
|
||||||
capex_items = relationship(
|
)
|
||||||
Capex, back_populates="scenario")
|
capex_items = relationship(Capex, back_populates="scenario")
|
||||||
opex_items = relationship(
|
opex_items = relationship(Opex, back_populates="scenario")
|
||||||
Opex, back_populates="scenario")
|
consumption_items = relationship(Consumption, back_populates="scenario")
|
||||||
consumption_items = relationship(
|
|
||||||
Consumption, back_populates="scenario")
|
|
||||||
production_output_items = relationship(
|
production_output_items = relationship(
|
||||||
ProductionOutput, back_populates="scenario")
|
ProductionOutput, back_populates="scenario"
|
||||||
equipment_items = relationship(
|
)
|
||||||
Equipment, back_populates="scenario")
|
equipment_items = relationship(Equipment, back_populates="scenario")
|
||||||
maintenance_items = relationship(
|
maintenance_items = relationship(Maintenance, back_populates="scenario")
|
||||||
Maintenance, back_populates="scenario")
|
|
||||||
|
|
||||||
# relationships can be defined later
|
# relationships can be defined later
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
|
|||||||
15
models/theme_setting.py
Normal file
15
models/theme_setting.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
"""SQLAlchemy model for named UI color themes."""

from sqlalchemy import Column, Integer, String

from config.database import Base


class ThemeSetting(Base):
    """Named UI theme consisting of five color values stored as strings."""

    __tablename__ = "theme_settings"

    id = Column(Integer, primary_key=True, index=True)
    # Theme identifier; unique per theme.
    theme_name = Column(String, unique=True, index=True)
    # Color values are stored as raw strings — presumably CSS color
    # literals such as "#1a2b3c"; confirm against the settings UI.
    primary_color = Column(String)
    secondary_color = Column(String)
    accent_color = Column(String)
    background_color = Column(String)
    text_color = Column(String)
|
||||||
23
models/user.py
Normal file
23
models/user.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""SQLAlchemy model for application user accounts."""

from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship

from config.database import Base
from services.security import get_password_hash, verify_password


class User(Base):
    """Application account with hashed credentials and a single role."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    username = Column(String, unique=True, index=True)
    email = Column(String, unique=True, index=True)
    # Only the hash is persisted; the plaintext password never touches
    # the database (see set_password / check_password below).
    hashed_password = Column(String)
    role_id = Column(Integer, ForeignKey("roles.id"))

    role = relationship("Role", back_populates="users")

    def set_password(self, password: str) -> None:
        """Hash *password* via services.security and store the hash."""
        self.hashed_password = get_password_hash(password)

    def check_password(self, password: str) -> bool:
        """Return True if *password* matches the stored hash."""
        # str() coerces the column value defensively before verification;
        # NOTE(review): likely unnecessary on a loaded instance — confirm.
        return verify_password(password, str(self.hashed_password))
|
||||||
16
pyproject.toml
Normal file
16
pyproject.toml
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
[tool.black]
|
||||||
|
line-length = 80
|
||||||
|
target-version = ['py310']
|
||||||
|
include = '\.pyi?$'
|
||||||
|
exclude = '''
|
||||||
|
/(
|
||||||
|
.git
|
||||||
|
| .hg
|
||||||
|
| .mypy_cache
|
||||||
|
| .tox
|
||||||
|
| .venv
|
||||||
|
| build
|
||||||
|
| dist
|
||||||
|
)/
|
||||||
|
'''
|
||||||
|
|
||||||
1
requirements-dev.txt
Normal file
1
requirements-dev.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
black
|
||||||
@@ -3,3 +3,4 @@ pytest-cov
|
|||||||
pytest-httpx
|
pytest-httpx
|
||||||
playwright
|
playwright
|
||||||
pytest-playwright
|
pytest-playwright
|
||||||
|
ruff
|
||||||
@@ -36,7 +36,9 @@ class ConsumptionRead(ConsumptionBase):
|
|||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ConsumptionRead, status_code=status.HTTP_201_CREATED)
|
@router.post(
|
||||||
|
"/", response_model=ConsumptionRead, status_code=status.HTTP_201_CREATED
|
||||||
|
)
|
||||||
def create_consumption(item: ConsumptionCreate, db: Session = Depends(get_db)):
|
def create_consumption(item: ConsumptionCreate, db: Session = Depends(get_db)):
|
||||||
db_item = Consumption(**item.model_dump())
|
db_item = Consumption(**item.model_dump())
|
||||||
db.add(db_item)
|
db.add(db_item)
|
||||||
|
|||||||
@@ -73,7 +73,8 @@ def create_capex(item: CapexCreate, db: Session = Depends(get_db)):
|
|||||||
if not cid:
|
if not cid:
|
||||||
code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
|
code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
|
||||||
currency_cls = __import__(
|
currency_cls = __import__(
|
||||||
"models.currency", fromlist=["Currency"]).Currency
|
"models.currency", fromlist=["Currency"]
|
||||||
|
).Currency
|
||||||
currency = db.query(currency_cls).filter_by(code=code).one_or_none()
|
currency = db.query(currency_cls).filter_by(code=code).one_or_none()
|
||||||
if currency is None:
|
if currency is None:
|
||||||
currency = currency_cls(code=code, name=code, symbol=None)
|
currency = currency_cls(code=code, name=code, symbol=None)
|
||||||
@@ -100,7 +101,8 @@ def create_opex(item: OpexCreate, db: Session = Depends(get_db)):
|
|||||||
if not cid:
|
if not cid:
|
||||||
code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
|
code = (payload.pop("currency_code", "USD") or "USD").strip().upper()
|
||||||
currency_cls = __import__(
|
currency_cls = __import__(
|
||||||
"models.currency", fromlist=["Currency"]).Currency
|
"models.currency", fromlist=["Currency"]
|
||||||
|
).Currency
|
||||||
currency = db.query(currency_cls).filter_by(code=code).one_or_none()
|
currency = db.query(currency_cls).filter_by(code=code).one_or_none()
|
||||||
if currency is None:
|
if currency is None:
|
||||||
currency = currency_cls(code=code, name=code, symbol=None)
|
currency = currency_cls(code=code, name=code, symbol=None)
|
||||||
|
|||||||
@@ -97,20 +97,20 @@ def _ensure_default_currency(db: Session) -> Currency:
|
|||||||
def _get_currency_or_404(db: Session, code: str) -> Currency:
|
def _get_currency_or_404(db: Session, code: str) -> Currency:
|
||||||
normalized = code.strip().upper()
|
normalized = code.strip().upper()
|
||||||
currency = (
|
currency = (
|
||||||
db.query(Currency)
|
db.query(Currency).filter(Currency.code == normalized).one_or_none()
|
||||||
.filter(Currency.code == normalized)
|
|
||||||
.one_or_none()
|
|
||||||
)
|
)
|
||||||
if currency is None:
|
if currency is None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_404_NOT_FOUND, detail="Currency not found")
|
status_code=status.HTTP_404_NOT_FOUND, detail="Currency not found"
|
||||||
|
)
|
||||||
return currency
|
return currency
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[CurrencyRead])
|
@router.get("/", response_model=List[CurrencyRead])
|
||||||
def list_currencies(
|
def list_currencies(
|
||||||
include_inactive: bool = Query(
|
include_inactive: bool = Query(
|
||||||
False, description="Include inactive currencies"),
|
False, description="Include inactive currencies"
|
||||||
|
),
|
||||||
db: Session = Depends(get_db),
|
db: Session = Depends(get_db),
|
||||||
):
|
):
|
||||||
_ensure_default_currency(db)
|
_ensure_default_currency(db)
|
||||||
@@ -121,14 +121,12 @@ def list_currencies(
|
|||||||
return currencies
|
return currencies
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=CurrencyRead, status_code=status.HTTP_201_CREATED)
|
@router.post(
|
||||||
|
"/", response_model=CurrencyRead, status_code=status.HTTP_201_CREATED
|
||||||
|
)
|
||||||
def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
|
def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
|
||||||
code = payload.code
|
code = payload.code
|
||||||
existing = (
|
existing = db.query(Currency).filter(Currency.code == code).one_or_none()
|
||||||
db.query(Currency)
|
|
||||||
.filter(Currency.code == code)
|
|
||||||
.one_or_none()
|
|
||||||
)
|
|
||||||
if existing is not None:
|
if existing is not None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_409_CONFLICT,
|
status_code=status.HTTP_409_CONFLICT,
|
||||||
@@ -148,7 +146,9 @@ def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
|
|||||||
|
|
||||||
|
|
||||||
@router.put("/{code}", response_model=CurrencyRead)
|
@router.put("/{code}", response_model=CurrencyRead)
|
||||||
def update_currency(code: str, payload: CurrencyUpdate, db: Session = Depends(get_db)):
|
def update_currency(
|
||||||
|
code: str, payload: CurrencyUpdate, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
currency = _get_currency_or_404(db, code)
|
currency = _get_currency_or_404(db, code)
|
||||||
|
|
||||||
if payload.name is not None:
|
if payload.name is not None:
|
||||||
@@ -175,7 +175,9 @@ def update_currency(code: str, payload: CurrencyUpdate, db: Session = Depends(ge
|
|||||||
|
|
||||||
|
|
||||||
@router.patch("/{code}/activation", response_model=CurrencyRead)
|
@router.patch("/{code}/activation", response_model=CurrencyRead)
|
||||||
def toggle_currency_activation(code: str, body: CurrencyActivation, db: Session = Depends(get_db)):
|
def toggle_currency_activation(
|
||||||
|
code: str, body: CurrencyActivation, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
currency = _get_currency_or_404(db, code)
|
currency = _get_currency_or_404(db, code)
|
||||||
code_value = getattr(currency, "code")
|
code_value = getattr(currency, "code")
|
||||||
if code_value == DEFAULT_CURRENCY_CODE and body.is_active is False:
|
if code_value == DEFAULT_CURRENCY_CODE and body.is_active is False:
|
||||||
|
|||||||
@@ -22,7 +22,9 @@ class DistributionRead(DistributionCreate):
|
|||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=DistributionRead)
|
@router.post("/", response_model=DistributionRead)
|
||||||
async def create_distribution(dist: DistributionCreate, db: Session = Depends(get_db)):
|
async def create_distribution(
|
||||||
|
dist: DistributionCreate, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
db_dist = Distribution(**dist.model_dump())
|
db_dist = Distribution(**dist.model_dump())
|
||||||
db.add(db_dist)
|
db.add(db_dist)
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|||||||
@@ -23,7 +23,9 @@ class EquipmentRead(EquipmentCreate):
|
|||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=EquipmentRead)
|
@router.post("/", response_model=EquipmentRead)
|
||||||
async def create_equipment(item: EquipmentCreate, db: Session = Depends(get_db)):
|
async def create_equipment(
|
||||||
|
item: EquipmentCreate, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
db_item = Equipment(**item.model_dump())
|
db_item = Equipment(**item.model_dump())
|
||||||
db.add(db_item)
|
db.add(db_item)
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|||||||
@@ -34,8 +34,9 @@ class MaintenanceRead(MaintenanceBase):
|
|||||||
|
|
||||||
|
|
||||||
def _get_maintenance_or_404(db: Session, maintenance_id: int) -> Maintenance:
|
def _get_maintenance_or_404(db: Session, maintenance_id: int) -> Maintenance:
|
||||||
maintenance = db.query(Maintenance).filter(
|
maintenance = (
|
||||||
Maintenance.id == maintenance_id).first()
|
db.query(Maintenance).filter(Maintenance.id == maintenance_id).first()
|
||||||
|
)
|
||||||
if maintenance is None:
|
if maintenance is None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_404_NOT_FOUND,
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
@@ -44,8 +45,12 @@ def _get_maintenance_or_404(db: Session, maintenance_id: int) -> Maintenance:
|
|||||||
return maintenance
|
return maintenance
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=MaintenanceRead, status_code=status.HTTP_201_CREATED)
|
@router.post(
|
||||||
def create_maintenance(maintenance: MaintenanceCreate, db: Session = Depends(get_db)):
|
"/", response_model=MaintenanceRead, status_code=status.HTTP_201_CREATED
|
||||||
|
)
|
||||||
|
def create_maintenance(
|
||||||
|
maintenance: MaintenanceCreate, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
db_maintenance = Maintenance(**maintenance.model_dump())
|
db_maintenance = Maintenance(**maintenance.model_dump())
|
||||||
db.add(db_maintenance)
|
db.add(db_maintenance)
|
||||||
db.commit()
|
db.commit()
|
||||||
@@ -54,7 +59,9 @@ def create_maintenance(maintenance: MaintenanceCreate, db: Session = Depends(get
|
|||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[MaintenanceRead])
|
@router.get("/", response_model=List[MaintenanceRead])
|
||||||
def list_maintenance(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
|
def list_maintenance(
|
||||||
|
skip: int = 0, limit: int = 100, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
return db.query(Maintenance).offset(skip).limit(limit).all()
|
return db.query(Maintenance).offset(skip).limit(limit).all()
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,12 +30,15 @@ class ParameterCreate(BaseModel):
|
|||||||
return None
|
return None
|
||||||
if normalized not in {"normal", "uniform", "triangular"}:
|
if normalized not in {"normal", "uniform", "triangular"}:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"distribution_type must be normal, uniform, or triangular")
|
"distribution_type must be normal, uniform, or triangular"
|
||||||
|
)
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
@field_validator("distribution_parameters")
|
@field_validator("distribution_parameters")
|
||||||
@classmethod
|
@classmethod
|
||||||
def empty_dict_to_none(cls, value: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
|
def empty_dict_to_none(
|
||||||
|
cls, value: Optional[Dict[str, Any]]
|
||||||
|
) -> Optional[Dict[str, Any]]:
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
return value or None
|
return value or None
|
||||||
@@ -45,6 +48,7 @@ class ParameterRead(ParameterCreate):
|
|||||||
id: int
|
id: int
|
||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ParameterRead)
|
@router.post("/", response_model=ParameterRead)
|
||||||
def create_parameter(param: ParameterCreate, db: Session = Depends(get_db)):
|
def create_parameter(param: ParameterCreate, db: Session = Depends(get_db)):
|
||||||
scen = db.query(Scenario).filter(Scenario.id == param.scenario_id).first()
|
scen = db.query(Scenario).filter(Scenario.id == param.scenario_id).first()
|
||||||
@@ -55,11 +59,15 @@ def create_parameter(param: ParameterCreate, db: Session = Depends(get_db)):
|
|||||||
distribution_parameters = param.distribution_parameters
|
distribution_parameters = param.distribution_parameters
|
||||||
|
|
||||||
if distribution_id is not None:
|
if distribution_id is not None:
|
||||||
distribution = db.query(Distribution).filter(
|
distribution = (
|
||||||
Distribution.id == distribution_id).first()
|
db.query(Distribution)
|
||||||
|
.filter(Distribution.id == distribution_id)
|
||||||
|
.first()
|
||||||
|
)
|
||||||
if not distribution:
|
if not distribution:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=404, detail="Distribution not found")
|
status_code=404, detail="Distribution not found"
|
||||||
|
)
|
||||||
distribution_type = distribution.distribution_type
|
distribution_type = distribution.distribution_type
|
||||||
distribution_parameters = distribution.parameters or None
|
distribution_parameters = distribution.parameters or None
|
||||||
|
|
||||||
|
|||||||
@@ -36,8 +36,14 @@ class ProductionOutputRead(ProductionOutputBase):
|
|||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ProductionOutputRead, status_code=status.HTTP_201_CREATED)
|
@router.post(
|
||||||
def create_production(item: ProductionOutputCreate, db: Session = Depends(get_db)):
|
"/",
|
||||||
|
response_model=ProductionOutputRead,
|
||||||
|
status_code=status.HTTP_201_CREATED,
|
||||||
|
)
|
||||||
|
def create_production(
|
||||||
|
item: ProductionOutputCreate, db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
db_item = ProductionOutput(**item.model_dump())
|
db_item = ProductionOutput(**item.model_dump())
|
||||||
db.add(db_item)
|
db.add(db_item)
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ class ScenarioRead(ScenarioCreate):
|
|||||||
updated_at: Optional[datetime] = None
|
updated_at: Optional[datetime] = None
|
||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/", response_model=ScenarioRead)
|
@router.post("/", response_model=ScenarioRead)
|
||||||
def create_scenario(scenario: ScenarioCreate, db: Session = Depends(get_db)):
|
def create_scenario(scenario: ScenarioCreate, db: Session = Depends(get_db)):
|
||||||
db_s = db.query(Scenario).filter(Scenario.name == scenario.name).first()
|
db_s = db.query(Scenario).filter(Scenario.name == scenario.name).first()
|
||||||
|
|||||||
110
routes/settings.py
Normal file
110
routes/settings.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from pydantic import BaseModel, Field, model_validator
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from routes.dependencies import get_db
|
||||||
|
from services.settings import (
|
||||||
|
CSS_COLOR_DEFAULTS,
|
||||||
|
get_css_color_settings,
|
||||||
|
list_css_env_override_rows,
|
||||||
|
read_css_color_env_overrides,
|
||||||
|
update_css_color_settings,
|
||||||
|
get_theme_settings,
|
||||||
|
save_theme_settings,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/settings", tags=["Settings"])
|
||||||
|
|
||||||
|
|
||||||
|
class CSSSettingsPayload(BaseModel):
|
||||||
|
variables: Dict[str, str] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def _validate_allowed_keys(self) -> "CSSSettingsPayload":
|
||||||
|
invalid = set(self.variables.keys()) - set(CSS_COLOR_DEFAULTS.keys())
|
||||||
|
if invalid:
|
||||||
|
invalid_keys = ", ".join(sorted(invalid))
|
||||||
|
raise ValueError(
|
||||||
|
f"Unsupported CSS variables: {invalid_keys}."
|
||||||
|
" Accepted keys align with the default theme variables."
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class EnvOverride(BaseModel):
|
||||||
|
css_key: str
|
||||||
|
env_var: str
|
||||||
|
value: str
|
||||||
|
|
||||||
|
|
||||||
|
class CSSSettingsResponse(BaseModel):
|
||||||
|
variables: Dict[str, str]
|
||||||
|
env_overrides: Dict[str, str] = Field(default_factory=dict)
|
||||||
|
env_sources: List[EnvOverride] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/css", response_model=CSSSettingsResponse)
|
||||||
|
def read_css_settings(db: Session = Depends(get_db)) -> CSSSettingsResponse:
|
||||||
|
try:
|
||||||
|
values = get_css_color_settings(db)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_sources = [
|
||||||
|
EnvOverride(**row) for row in list_css_env_override_rows()
|
||||||
|
]
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
return CSSSettingsResponse(
|
||||||
|
variables=values,
|
||||||
|
env_overrides=env_overrides,
|
||||||
|
env_sources=env_sources,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put(
|
||||||
|
"/css", response_model=CSSSettingsResponse, status_code=status.HTTP_200_OK
|
||||||
|
)
|
||||||
|
def update_css_settings(
|
||||||
|
payload: CSSSettingsPayload, db: Session = Depends(get_db)
|
||||||
|
) -> CSSSettingsResponse:
|
||||||
|
try:
|
||||||
|
values = update_css_color_settings(db, payload.variables)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_sources = [
|
||||||
|
EnvOverride(**row) for row in list_css_env_override_rows()
|
||||||
|
]
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
return CSSSettingsResponse(
|
||||||
|
variables=values,
|
||||||
|
env_overrides=env_overrides,
|
||||||
|
env_sources=env_sources,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ThemeSettings(BaseModel):
|
||||||
|
theme_name: str
|
||||||
|
primary_color: str
|
||||||
|
secondary_color: str
|
||||||
|
accent_color: str
|
||||||
|
background_color: str
|
||||||
|
text_color: str
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/theme")
|
||||||
|
async def update_theme(theme_data: ThemeSettings, db: Session = Depends(get_db)):
|
||||||
|
data_dict = theme_data.model_dump()
|
||||||
|
saved = save_theme_settings(db, data_dict)
|
||||||
|
return {"message": "Theme updated", "theme": data_dict}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/theme")
|
||||||
|
async def get_theme(db: Session = Depends(get_db)):
|
||||||
|
return get_theme_settings(db)
|
||||||
@@ -43,7 +43,9 @@ class SimulationRunResponse(BaseModel):
|
|||||||
summary: Dict[str, float | int]
|
summary: Dict[str, float | int]
|
||||||
|
|
||||||
|
|
||||||
def _load_parameters(db: Session, scenario_id: int) -> List[SimulationParameterInput]:
|
def _load_parameters(
|
||||||
|
db: Session, scenario_id: int
|
||||||
|
) -> List[SimulationParameterInput]:
|
||||||
db_params = (
|
db_params = (
|
||||||
db.query(Parameter)
|
db.query(Parameter)
|
||||||
.filter(Parameter.scenario_id == scenario_id)
|
.filter(Parameter.scenario_id == scenario_id)
|
||||||
@@ -60,17 +62,19 @@ def _load_parameters(db: Session, scenario_id: int) -> List[SimulationParameterI
|
|||||||
|
|
||||||
|
|
||||||
@router.post("/run", response_model=SimulationRunResponse)
|
@router.post("/run", response_model=SimulationRunResponse)
|
||||||
async def simulate(payload: SimulationRunRequest, db: Session = Depends(get_db)):
|
async def simulate(
|
||||||
scenario = db.query(Scenario).filter(
|
payload: SimulationRunRequest, db: Session = Depends(get_db)
|
||||||
Scenario.id == payload.scenario_id).first()
|
):
|
||||||
|
scenario = (
|
||||||
|
db.query(Scenario).filter(Scenario.id == payload.scenario_id).first()
|
||||||
|
)
|
||||||
if scenario is None:
|
if scenario is None:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_404_NOT_FOUND,
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
detail="Scenario not found",
|
detail="Scenario not found",
|
||||||
)
|
)
|
||||||
|
|
||||||
parameters = payload.parameters or _load_parameters(
|
parameters = payload.parameters or _load_parameters(db, payload.scenario_id)
|
||||||
db, payload.scenario_id)
|
|
||||||
if not parameters:
|
if not parameters:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
|||||||
231
routes/ui.py
231
routes/ui.py
@@ -20,6 +20,12 @@ from routes.dependencies import get_db
|
|||||||
from services.reporting import generate_report
|
from services.reporting import generate_report
|
||||||
from models.currency import Currency
|
from models.currency import Currency
|
||||||
from routes.currencies import DEFAULT_CURRENCY_CODE, _ensure_default_currency
|
from routes.currencies import DEFAULT_CURRENCY_CODE, _ensure_default_currency
|
||||||
|
from services.settings import (
|
||||||
|
CSS_COLOR_DEFAULTS,
|
||||||
|
get_css_color_settings,
|
||||||
|
list_css_env_override_rows,
|
||||||
|
read_css_color_env_overrides,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
CURRENCY_CHOICES: list[Dict[str, Any]] = [
|
CURRENCY_CHOICES: list[Dict[str, Any]] = [
|
||||||
@@ -47,7 +53,9 @@ router = APIRouter()
|
|||||||
templates = Jinja2Templates(directory="templates")
|
templates = Jinja2Templates(directory="templates")
|
||||||
|
|
||||||
|
|
||||||
def _context(request: Request, extra: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
|
def _context(
|
||||||
|
request: Request, extra: Optional[Dict[str, Any]] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"request": request,
|
"request": request,
|
||||||
"current_year": datetime.now(timezone.utc).year,
|
"current_year": datetime.now(timezone.utc).year,
|
||||||
@@ -92,7 +100,9 @@ def _load_scenarios(db: Session) -> Dict[str, Any]:
|
|||||||
|
|
||||||
def _load_parameters(db: Session) -> Dict[str, Any]:
|
def _load_parameters(db: Session) -> Dict[str, Any]:
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for param in db.query(Parameter).order_by(Parameter.scenario_id, Parameter.id):
|
for param in db.query(Parameter).order_by(
|
||||||
|
Parameter.scenario_id, Parameter.id
|
||||||
|
):
|
||||||
grouped[param.scenario_id].append(
|
grouped[param.scenario_id].append(
|
||||||
{
|
{
|
||||||
"id": param.id,
|
"id": param.id,
|
||||||
@@ -107,27 +117,20 @@ def _load_parameters(db: Session) -> Dict[str, Any]:
|
|||||||
|
|
||||||
def _load_costs(db: Session) -> Dict[str, Any]:
|
def _load_costs(db: Session) -> Dict[str, Any]:
|
||||||
capex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
capex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for capex in (
|
for capex in db.query(Capex).order_by(Capex.scenario_id, Capex.id).all():
|
||||||
db.query(Capex)
|
|
||||||
.order_by(Capex.scenario_id, Capex.id)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
capex_grouped[int(getattr(capex, "scenario_id"))].append(
|
capex_grouped[int(getattr(capex, "scenario_id"))].append(
|
||||||
{
|
{
|
||||||
"id": int(getattr(capex, "id")),
|
"id": int(getattr(capex, "id")),
|
||||||
"scenario_id": int(getattr(capex, "scenario_id")),
|
"scenario_id": int(getattr(capex, "scenario_id")),
|
||||||
"amount": float(getattr(capex, "amount", 0.0)),
|
"amount": float(getattr(capex, "amount", 0.0)),
|
||||||
"description": getattr(capex, "description", "") or "",
|
"description": getattr(capex, "description", "") or "",
|
||||||
"currency_code": getattr(capex, "currency_code", "USD") or "USD",
|
"currency_code": getattr(capex, "currency_code", "USD")
|
||||||
|
or "USD",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
opex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
opex_grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for opex in (
|
for opex in db.query(Opex).order_by(Opex.scenario_id, Opex.id).all():
|
||||||
db.query(Opex)
|
|
||||||
.order_by(Opex.scenario_id, Opex.id)
|
|
||||||
.all()
|
|
||||||
):
|
|
||||||
opex_grouped[int(getattr(opex, "scenario_id"))].append(
|
opex_grouped[int(getattr(opex, "scenario_id"))].append(
|
||||||
{
|
{
|
||||||
"id": int(getattr(opex, "id")),
|
"id": int(getattr(opex, "id")),
|
||||||
@@ -146,9 +149,15 @@ def _load_costs(db: Session) -> Dict[str, Any]:
|
|||||||
|
|
||||||
def _load_currencies(db: Session) -> Dict[str, Any]:
|
def _load_currencies(db: Session) -> Dict[str, Any]:
|
||||||
items: list[Dict[str, Any]] = []
|
items: list[Dict[str, Any]] = []
|
||||||
for c in db.query(Currency).filter_by(is_active=True).order_by(Currency.code).all():
|
for c in (
|
||||||
|
db.query(Currency)
|
||||||
|
.filter_by(is_active=True)
|
||||||
|
.order_by(Currency.code)
|
||||||
|
.all()
|
||||||
|
):
|
||||||
items.append(
|
items.append(
|
||||||
{"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol})
|
{"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol}
|
||||||
|
)
|
||||||
if not items:
|
if not items:
|
||||||
items.append({"id": "USD", "name": "US Dollar (USD)", "symbol": "$"})
|
items.append({"id": "USD", "name": "US Dollar (USD)", "symbol": "$"})
|
||||||
return {"currency_options": items}
|
return {"currency_options": items}
|
||||||
@@ -186,6 +195,20 @@ def _load_currency_settings(db: Session) -> Dict[str, Any]:
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _load_css_settings(db: Session) -> Dict[str, Any]:
|
||||||
|
variables = get_css_color_settings(db)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_rows = list_css_env_override_rows()
|
||||||
|
env_meta = {row["css_key"]: row for row in env_rows}
|
||||||
|
return {
|
||||||
|
"css_variables": variables,
|
||||||
|
"css_defaults": CSS_COLOR_DEFAULTS,
|
||||||
|
"css_env_overrides": env_overrides,
|
||||||
|
"css_env_override_rows": env_rows,
|
||||||
|
"css_env_override_meta": env_meta,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def _load_consumption(db: Session) -> Dict[str, Any]:
|
def _load_consumption(db: Session) -> Dict[str, Any]:
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for record in (
|
for record in (
|
||||||
@@ -241,9 +264,7 @@ def _load_production(db: Session) -> Dict[str, Any]:
|
|||||||
def _load_equipment(db: Session) -> Dict[str, Any]:
|
def _load_equipment(db: Session) -> Dict[str, Any]:
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for record in (
|
for record in (
|
||||||
db.query(Equipment)
|
db.query(Equipment).order_by(Equipment.scenario_id, Equipment.id).all()
|
||||||
.order_by(Equipment.scenario_id, Equipment.id)
|
|
||||||
.all()
|
|
||||||
):
|
):
|
||||||
record_id = int(getattr(record, "id"))
|
record_id = int(getattr(record, "id"))
|
||||||
scenario_id = int(getattr(record, "scenario_id"))
|
scenario_id = int(getattr(record, "scenario_id"))
|
||||||
@@ -271,8 +292,9 @@ def _load_maintenance(db: Session) -> Dict[str, Any]:
|
|||||||
scenario_id = int(getattr(record, "scenario_id"))
|
scenario_id = int(getattr(record, "scenario_id"))
|
||||||
equipment_id = int(getattr(record, "equipment_id"))
|
equipment_id = int(getattr(record, "equipment_id"))
|
||||||
equipment_obj = getattr(record, "equipment", None)
|
equipment_obj = getattr(record, "equipment", None)
|
||||||
equipment_name = getattr(
|
equipment_name = (
|
||||||
equipment_obj, "name", "") if equipment_obj else ""
|
getattr(equipment_obj, "name", "") if equipment_obj else ""
|
||||||
|
)
|
||||||
maintenance_date = getattr(record, "maintenance_date", None)
|
maintenance_date = getattr(record, "maintenance_date", None)
|
||||||
cost_value = float(getattr(record, "cost", 0.0))
|
cost_value = float(getattr(record, "cost", 0.0))
|
||||||
description = getattr(record, "description", "") or ""
|
description = getattr(record, "description", "") or ""
|
||||||
@@ -283,7 +305,9 @@ def _load_maintenance(db: Session) -> Dict[str, Any]:
|
|||||||
"scenario_id": scenario_id,
|
"scenario_id": scenario_id,
|
||||||
"equipment_id": equipment_id,
|
"equipment_id": equipment_id,
|
||||||
"equipment_name": equipment_name,
|
"equipment_name": equipment_name,
|
||||||
"maintenance_date": maintenance_date.isoformat() if maintenance_date else "",
|
"maintenance_date": (
|
||||||
|
maintenance_date.isoformat() if maintenance_date else ""
|
||||||
|
),
|
||||||
"cost": cost_value,
|
"cost": cost_value,
|
||||||
"description": description,
|
"description": description,
|
||||||
}
|
}
|
||||||
@@ -319,8 +343,11 @@ def _load_simulations(db: Session) -> Dict[str, Any]:
|
|||||||
for item in scenarios:
|
for item in scenarios:
|
||||||
scenario_id = int(item["id"])
|
scenario_id = int(item["id"])
|
||||||
scenario_results = results_grouped.get(scenario_id, [])
|
scenario_results = results_grouped.get(scenario_id, [])
|
||||||
summary = generate_report(
|
summary = (
|
||||||
scenario_results) if scenario_results else generate_report([])
|
generate_report(scenario_results)
|
||||||
|
if scenario_results
|
||||||
|
else generate_report([])
|
||||||
|
)
|
||||||
runs.append(
|
runs.append(
|
||||||
{
|
{
|
||||||
"scenario_id": scenario_id,
|
"scenario_id": scenario_id,
|
||||||
@@ -375,11 +402,11 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
simulation_context = _load_simulations(db)
|
simulation_context = _load_simulations(db)
|
||||||
simulation_runs = simulation_context["simulation_runs"]
|
simulation_runs = simulation_context["simulation_runs"]
|
||||||
|
|
||||||
runs_by_scenario = {
|
runs_by_scenario = {run["scenario_id"]: run for run in simulation_runs}
|
||||||
run["scenario_id"]: run for run in simulation_runs
|
|
||||||
}
|
|
||||||
|
|
||||||
def sum_amounts(grouped: Dict[int, list[Dict[str, Any]]], field: str = "amount") -> float:
|
def sum_amounts(
|
||||||
|
grouped: Dict[int, list[Dict[str, Any]]], field: str = "amount"
|
||||||
|
) -> float:
|
||||||
total = 0.0
|
total = 0.0
|
||||||
for items in grouped.values():
|
for items in grouped.values():
|
||||||
for item in items:
|
for item in items:
|
||||||
@@ -394,14 +421,18 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
total_production = sum_amounts(production_by_scenario)
|
total_production = sum_amounts(production_by_scenario)
|
||||||
total_maintenance_cost = sum_amounts(maintenance_by_scenario, field="cost")
|
total_maintenance_cost = sum_amounts(maintenance_by_scenario, field="cost")
|
||||||
|
|
||||||
total_parameters = sum(len(items)
|
total_parameters = sum(
|
||||||
for items in parameters_by_scenario.values())
|
len(items) for items in parameters_by_scenario.values()
|
||||||
total_equipment = sum(len(items)
|
)
|
||||||
for items in equipment_by_scenario.values())
|
total_equipment = sum(
|
||||||
total_maintenance_events = sum(len(items)
|
len(items) for items in equipment_by_scenario.values()
|
||||||
for items in maintenance_by_scenario.values())
|
)
|
||||||
|
total_maintenance_events = sum(
|
||||||
|
len(items) for items in maintenance_by_scenario.values()
|
||||||
|
)
|
||||||
total_simulation_iterations = sum(
|
total_simulation_iterations = sum(
|
||||||
run["iterations"] for run in simulation_runs)
|
run["iterations"] for run in simulation_runs
|
||||||
|
)
|
||||||
|
|
||||||
scenario_rows: list[Dict[str, Any]] = []
|
scenario_rows: list[Dict[str, Any]] = []
|
||||||
scenario_labels: list[str] = []
|
scenario_labels: list[str] = []
|
||||||
@@ -481,20 +512,40 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
overall_report = generate_report(all_simulation_results)
|
overall_report = generate_report(all_simulation_results)
|
||||||
|
|
||||||
overall_report_metrics = [
|
overall_report_metrics = [
|
||||||
{"label": "Runs", "value": _format_int(
|
{
|
||||||
int(overall_report.get("count", 0)))},
|
"label": "Runs",
|
||||||
{"label": "Mean", "value": _format_decimal(
|
"value": _format_int(int(overall_report.get("count", 0))),
|
||||||
float(overall_report.get("mean", 0.0)))},
|
},
|
||||||
{"label": "Median", "value": _format_decimal(
|
{
|
||||||
float(overall_report.get("median", 0.0)))},
|
"label": "Mean",
|
||||||
{"label": "Std Dev", "value": _format_decimal(
|
"value": _format_decimal(float(overall_report.get("mean", 0.0))),
|
||||||
float(overall_report.get("std_dev", 0.0)))},
|
},
|
||||||
{"label": "95th Percentile", "value": _format_decimal(
|
{
|
||||||
float(overall_report.get("percentile_95", 0.0)))},
|
"label": "Median",
|
||||||
{"label": "VaR (95%)", "value": _format_decimal(
|
"value": _format_decimal(float(overall_report.get("median", 0.0))),
|
||||||
float(overall_report.get("value_at_risk_95", 0.0)))},
|
},
|
||||||
{"label": "Expected Shortfall (95%)", "value": _format_decimal(
|
{
|
||||||
float(overall_report.get("expected_shortfall_95", 0.0)))},
|
"label": "Std Dev",
|
||||||
|
"value": _format_decimal(float(overall_report.get("std_dev", 0.0))),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "95th Percentile",
|
||||||
|
"value": _format_decimal(
|
||||||
|
float(overall_report.get("percentile_95", 0.0))
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "VaR (95%)",
|
||||||
|
"value": _format_decimal(
|
||||||
|
float(overall_report.get("value_at_risk_95", 0.0))
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Expected Shortfall (95%)",
|
||||||
|
"value": _format_decimal(
|
||||||
|
float(overall_report.get("expected_shortfall_95", 0.0))
|
||||||
|
),
|
||||||
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
recent_simulations: list[Dict[str, Any]] = [
|
recent_simulations: list[Dict[str, Any]] = [
|
||||||
@@ -502,8 +553,12 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
"scenario_name": run["scenario_name"],
|
"scenario_name": run["scenario_name"],
|
||||||
"iterations": run["iterations"],
|
"iterations": run["iterations"],
|
||||||
"iterations_display": _format_int(run["iterations"]),
|
"iterations_display": _format_int(run["iterations"]),
|
||||||
"mean_display": _format_decimal(float(run["summary"].get("mean", 0.0))),
|
"mean_display": _format_decimal(
|
||||||
"p95_display": _format_decimal(float(run["summary"].get("percentile_95", 0.0))),
|
float(run["summary"].get("mean", 0.0))
|
||||||
|
),
|
||||||
|
"p95_display": _format_decimal(
|
||||||
|
float(run["summary"].get("percentile_95", 0.0))
|
||||||
|
),
|
||||||
}
|
}
|
||||||
for run in simulation_runs
|
for run in simulation_runs
|
||||||
if run["iterations"] > 0
|
if run["iterations"] > 0
|
||||||
@@ -521,10 +576,20 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
maintenance_date = getattr(record, "maintenance_date", None)
|
maintenance_date = getattr(record, "maintenance_date", None)
|
||||||
upcoming_maintenance.append(
|
upcoming_maintenance.append(
|
||||||
{
|
{
|
||||||
"scenario_name": getattr(getattr(record, "scenario", None), "name", "Unknown"),
|
"scenario_name": getattr(
|
||||||
"equipment_name": getattr(getattr(record, "equipment", None), "name", "Unknown"),
|
getattr(record, "scenario", None), "name", "Unknown"
|
||||||
"date_display": maintenance_date.strftime("%Y-%m-%d") if maintenance_date else "—",
|
),
|
||||||
"cost_display": _format_currency(float(getattr(record, "cost", 0.0))),
|
"equipment_name": getattr(
|
||||||
|
getattr(record, "equipment", None), "name", "Unknown"
|
||||||
|
),
|
||||||
|
"date_display": (
|
||||||
|
maintenance_date.strftime("%Y-%m-%d")
|
||||||
|
if maintenance_date
|
||||||
|
else "—"
|
||||||
|
),
|
||||||
|
"cost_display": _format_currency(
|
||||||
|
float(getattr(record, "cost", 0.0))
|
||||||
|
),
|
||||||
"description": getattr(record, "description", "") or "—",
|
"description": getattr(record, "description", "") or "—",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@@ -532,9 +597,9 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
cost_chart_has_data = any(value > 0 for value in scenario_capex) or any(
|
cost_chart_has_data = any(value > 0 for value in scenario_capex) or any(
|
||||||
value > 0 for value in scenario_opex
|
value > 0 for value in scenario_opex
|
||||||
)
|
)
|
||||||
activity_chart_has_data = any(value > 0 for value in activity_production) or any(
|
activity_chart_has_data = any(
|
||||||
value > 0 for value in activity_consumption
|
value > 0 for value in activity_production
|
||||||
)
|
) or any(value > 0 for value in activity_consumption)
|
||||||
|
|
||||||
scenario_cost_chart: Dict[str, list[Any]] = {
|
scenario_cost_chart: Dict[str, list[Any]] = {
|
||||||
"labels": scenario_labels,
|
"labels": scenario_labels,
|
||||||
@@ -553,14 +618,20 @@ def _load_dashboard(db: Session) -> Dict[str, Any]:
|
|||||||
{"label": "CAPEX Total", "value": _format_currency(total_capex)},
|
{"label": "CAPEX Total", "value": _format_currency(total_capex)},
|
||||||
{"label": "OPEX Total", "value": _format_currency(total_opex)},
|
{"label": "OPEX Total", "value": _format_currency(total_opex)},
|
||||||
{"label": "Equipment Assets", "value": _format_int(total_equipment)},
|
{"label": "Equipment Assets", "value": _format_int(total_equipment)},
|
||||||
{"label": "Maintenance Events",
|
{
|
||||||
"value": _format_int(total_maintenance_events)},
|
"label": "Maintenance Events",
|
||||||
|
"value": _format_int(total_maintenance_events),
|
||||||
|
},
|
||||||
{"label": "Consumption", "value": _format_decimal(total_consumption)},
|
{"label": "Consumption", "value": _format_decimal(total_consumption)},
|
||||||
{"label": "Production", "value": _format_decimal(total_production)},
|
{"label": "Production", "value": _format_decimal(total_production)},
|
||||||
{"label": "Simulation Iterations",
|
{
|
||||||
"value": _format_int(total_simulation_iterations)},
|
"label": "Simulation Iterations",
|
||||||
{"label": "Maintenance Cost",
|
"value": _format_int(total_simulation_iterations),
|
||||||
"value": _format_currency(total_maintenance_cost)},
|
},
|
||||||
|
{
|
||||||
|
"label": "Maintenance Cost",
|
||||||
|
"value": _format_currency(total_maintenance_cost),
|
||||||
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -672,8 +743,42 @@ async def reporting_view(request: Request, db: Session = Depends(get_db)):
|
|||||||
return _render(request, "reporting.html", _load_reporting(db))
|
return _render(request, "reporting.html", _load_reporting(db))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/ui/settings", response_class=HTMLResponse)
|
||||||
|
async def settings_view(request: Request, db: Session = Depends(get_db)):
|
||||||
|
"""Render the settings landing page."""
|
||||||
|
context = _load_css_settings(db)
|
||||||
|
return _render(request, "settings.html", context)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ui/currencies", response_class=HTMLResponse)
|
@router.get("/ui/currencies", response_class=HTMLResponse)
|
||||||
async def currencies_view(request: Request, db: Session = Depends(get_db)):
|
async def currencies_view(request: Request, db: Session = Depends(get_db)):
|
||||||
"""Render the currency administration page with full currency context."""
|
"""Render the currency administration page with full currency context."""
|
||||||
context = _load_currency_settings(db)
|
context = _load_currency_settings(db)
|
||||||
return _render(request, "currencies.html", context)
|
return _render(request, "currencies.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/login", response_class=HTMLResponse)
|
||||||
|
async def login_page(request: Request):
|
||||||
|
return _render(request, "login.html")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/register", response_class=HTMLResponse)
|
||||||
|
async def register_page(request: Request):
|
||||||
|
return _render(request, "register.html")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/profile", response_class=HTMLResponse)
|
||||||
|
async def profile_page(request: Request):
|
||||||
|
return _render(request, "profile.html")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/forgot-password", response_class=HTMLResponse)
|
||||||
|
async def forgot_password_page(request: Request):
|
||||||
|
return _render(request, "forgot_password.html")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/theme-settings", response_class=HTMLResponse)
|
||||||
|
async def theme_settings_page(request: Request, db: Session = Depends(get_db)):
|
||||||
|
"""Render the theme settings page."""
|
||||||
|
context = _load_css_settings(db)
|
||||||
|
return _render(request, "theme_settings.html", context)
|
||||||
|
|||||||
126
routes/users.py
Normal file
126
routes/users.py
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from fastapi.security import OAuth2PasswordBearer
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from config.database import get_db
|
||||||
|
from models.user import User
|
||||||
|
from services.security import get_password_hash, verify_password, create_access_token, SECRET_KEY, ALGORITHM
|
||||||
|
from jose import jwt, JWTError
|
||||||
|
from schemas.user import UserCreate, UserInDB, UserLogin, UserUpdate, PasswordResetRequest, PasswordReset, Token
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/users", tags=["users"])
|
||||||
|
|
||||||
|
|
||||||
|
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="users/login")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)):
|
||||||
|
credentials_exception = HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Could not validate credentials",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||||
|
username: str = payload.get("sub")
|
||||||
|
if username is None:
|
||||||
|
raise credentials_exception
|
||||||
|
if username is None:
|
||||||
|
raise credentials_exception
|
||||||
|
except JWTError:
|
||||||
|
raise credentials_exception
|
||||||
|
user = db.query(User).filter(User.username == username).first()
|
||||||
|
if user is None:
|
||||||
|
raise credentials_exception
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/register", response_model=UserInDB, status_code=status.HTTP_201_CREATED)
|
||||||
|
async def register_user(user: UserCreate, db: Session = Depends(get_db)):
|
||||||
|
db_user = db.query(User).filter(User.username == user.username).first()
|
||||||
|
if db_user:
|
||||||
|
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
detail="Username already registered")
|
||||||
|
db_user = db.query(User).filter(User.email == user.email).first()
|
||||||
|
if db_user:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
|
||||||
|
|
||||||
|
# Get or create default role
|
||||||
|
from models.role import Role
|
||||||
|
default_role = db.query(Role).filter(Role.name == "user").first()
|
||||||
|
if not default_role:
|
||||||
|
default_role = Role(name="user")
|
||||||
|
db.add(default_role)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(default_role)
|
||||||
|
|
||||||
|
new_user = User(username=user.username, email=user.email,
|
||||||
|
role_id=default_role.id)
|
||||||
|
new_user.set_password(user.password)
|
||||||
|
db.add(new_user)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(new_user)
|
||||||
|
return new_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/login")
|
||||||
|
async def login_user(user: UserLogin, db: Session = Depends(get_db)):
|
||||||
|
db_user = db.query(User).filter(User.username == user.username).first()
|
||||||
|
if not db_user or not db_user.check_password(user.password):
|
||||||
|
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Incorrect username or password")
|
||||||
|
access_token = create_access_token(subject=db_user.username)
|
||||||
|
return {"access_token": access_token, "token_type": "bearer"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/me")
|
||||||
|
async def read_users_me(current_user: User = Depends(get_current_user)):
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/me", response_model=UserInDB)
|
||||||
|
async def update_user_me(user_update: UserUpdate, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
|
||||||
|
if user_update.username and user_update.username != current_user.username:
|
||||||
|
existing_user = db.query(User).filter(
|
||||||
|
User.username == user_update.username).first()
|
||||||
|
if existing_user:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST, detail="Username already taken")
|
||||||
|
current_user.username = user_update.username
|
||||||
|
|
||||||
|
if user_update.email and user_update.email != current_user.email:
|
||||||
|
existing_user = db.query(User).filter(
|
||||||
|
User.email == user_update.email).first()
|
||||||
|
if existing_user:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
|
||||||
|
current_user.email = user_update.email
|
||||||
|
|
||||||
|
if user_update.password:
|
||||||
|
current_user.set_password(user_update.password)
|
||||||
|
|
||||||
|
db.add(current_user)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(current_user)
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/forgot-password")
|
||||||
|
async def forgot_password(request: PasswordResetRequest):
|
||||||
|
# In a real application, this would send an email with a reset token
|
||||||
|
return {"message": "Password reset email sent (not really)"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/reset-password")
|
||||||
|
async def reset_password(request: PasswordReset, db: Session = Depends(get_db)):
|
||||||
|
# In a real application, the token would be verified
|
||||||
|
user = db.query(User).filter(User.username ==
|
||||||
|
request.token).first() # Use token as username for test
|
||||||
|
if not user:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid token or user")
|
||||||
|
user.set_password(request.new_password)
|
||||||
|
db.add(user)
|
||||||
|
db.commit()
|
||||||
|
return {"message": "Password has been reset successfully"}
|
||||||
41
schemas/user.py
Normal file
41
schemas/user.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
from pydantic import BaseModel, ConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
class UserCreate(BaseModel):
|
||||||
|
username: str
|
||||||
|
email: str
|
||||||
|
password: str
|
||||||
|
|
||||||
|
|
||||||
|
class UserInDB(BaseModel):
|
||||||
|
id: int
|
||||||
|
username: str
|
||||||
|
email: str
|
||||||
|
role_id: int
|
||||||
|
|
||||||
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
|
class UserLogin(BaseModel):
|
||||||
|
username: str
|
||||||
|
password: str
|
||||||
|
|
||||||
|
|
||||||
|
class UserUpdate(BaseModel):
|
||||||
|
username: str | None = None
|
||||||
|
email: str | None = None
|
||||||
|
password: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordResetRequest(BaseModel):
|
||||||
|
email: str
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordReset(BaseModel):
|
||||||
|
token: str
|
||||||
|
new_password: str
|
||||||
|
|
||||||
|
|
||||||
|
class Token(BaseModel):
|
||||||
|
access_token: str
|
||||||
|
token_type: str
|
||||||
@@ -9,6 +9,7 @@ This script is intentionally cautious: it defaults to dry-run mode and will refu
|
|||||||
if database connection settings are missing. It supports creating missing currency rows when `--create-missing`
|
if database connection settings are missing. It supports creating missing currency rows when `--create-missing`
|
||||||
is provided. Always run against a development/staging database first.
|
is provided. Always run against a development/staging database first.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
import argparse
|
import argparse
|
||||||
import importlib
|
import importlib
|
||||||
@@ -36,26 +37,43 @@ def load_database_url() -> str:
|
|||||||
return getattr(db_module, "DATABASE_URL")
|
return getattr(db_module, "DATABASE_URL")
|
||||||
|
|
||||||
|
|
||||||
def backfill(db_url: str, dry_run: bool = True, create_missing: bool = False) -> None:
|
def backfill(
|
||||||
|
db_url: str, dry_run: bool = True, create_missing: bool = False
|
||||||
|
) -> None:
|
||||||
engine = create_engine(db_url)
|
engine = create_engine(db_url)
|
||||||
with engine.begin() as conn:
|
with engine.begin() as conn:
|
||||||
# Ensure currency table exists
|
# Ensure currency table exists
|
||||||
res = conn.execute(text("SELECT name FROM sqlite_master WHERE type='table' AND name='currency';")) if db_url.startswith(
|
res = (
|
||||||
'sqlite:') else conn.execute(text("SELECT to_regclass('public.currency');"))
|
conn.execute(
|
||||||
|
text(
|
||||||
|
"SELECT name FROM sqlite_master WHERE type='table' AND name='currency';"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if db_url.startswith("sqlite:")
|
||||||
|
else conn.execute(text("SELECT to_regclass('public.currency');"))
|
||||||
|
)
|
||||||
# Note: we don't strictly depend on the above - we assume migration was already applied
|
# Note: we don't strictly depend on the above - we assume migration was already applied
|
||||||
|
|
||||||
# Helper: find or create currency by code
|
# Helper: find or create currency by code
|
||||||
def find_currency_id(code: str):
|
def find_currency_id(code: str):
|
||||||
r = conn.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
r = conn.execute(
|
||||||
"code": code}).fetchone()
|
text("SELECT id FROM currency WHERE code = :code"),
|
||||||
|
{"code": code},
|
||||||
|
).fetchone()
|
||||||
if r:
|
if r:
|
||||||
return r[0]
|
return r[0]
|
||||||
if create_missing:
|
if create_missing:
|
||||||
# insert and return id
|
# insert and return id
|
||||||
conn.execute(text("INSERT INTO currency (code, name, symbol, is_active) VALUES (:c, :n, NULL, TRUE)"), {
|
conn.execute(
|
||||||
"c": code, "n": code})
|
text(
|
||||||
r2 = conn.execute(text("SELECT id FROM currency WHERE code = :code"), {
|
"INSERT INTO currency (code, name, symbol, is_active) VALUES (:c, :n, NULL, TRUE)"
|
||||||
"code": code}).fetchone()
|
),
|
||||||
|
{"c": code, "n": code},
|
||||||
|
)
|
||||||
|
r2 = conn.execute(
|
||||||
|
text("SELECT id FROM currency WHERE code = :code"),
|
||||||
|
{"code": code},
|
||||||
|
).fetchone()
|
||||||
if not r2:
|
if not r2:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"Unable to determine currency ID for '{code}' after insert"
|
f"Unable to determine currency ID for '{code}' after insert"
|
||||||
@@ -67,8 +85,15 @@ def backfill(db_url: str, dry_run: bool = True, create_missing: bool = False) ->
|
|||||||
for table in ("capex", "opex"):
|
for table in ("capex", "opex"):
|
||||||
# Check if currency_id column exists
|
# Check if currency_id column exists
|
||||||
try:
|
try:
|
||||||
cols = conn.execute(text(f"SELECT 1 FROM information_schema.columns WHERE table_name = '{table}' AND column_name = 'currency_id'")) if not db_url.startswith(
|
cols = (
|
||||||
'sqlite:') else [(1,)]
|
conn.execute(
|
||||||
|
text(
|
||||||
|
f"SELECT 1 FROM information_schema.columns WHERE table_name = '{table}' AND column_name = 'currency_id'"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if not db_url.startswith("sqlite:")
|
||||||
|
else [(1,)]
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
cols = [(1,)]
|
cols = [(1,)]
|
||||||
|
|
||||||
@@ -77,8 +102,11 @@ def backfill(db_url: str, dry_run: bool = True, create_missing: bool = False) ->
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
# Find rows where currency_id IS NULL but currency_code exists
|
# Find rows where currency_id IS NULL but currency_code exists
|
||||||
rows = conn.execute(text(
|
rows = conn.execute(
|
||||||
f"SELECT id, currency_code FROM {table} WHERE currency_id IS NULL OR currency_id = ''"))
|
text(
|
||||||
|
f"SELECT id, currency_code FROM {table} WHERE currency_id IS NULL OR currency_id = ''"
|
||||||
|
)
|
||||||
|
)
|
||||||
changed = 0
|
changed = 0
|
||||||
for r in rows:
|
for r in rows:
|
||||||
rid = r[0]
|
rid = r[0]
|
||||||
@@ -86,14 +114,20 @@ def backfill(db_url: str, dry_run: bool = True, create_missing: bool = False) ->
|
|||||||
cid = find_currency_id(code)
|
cid = find_currency_id(code)
|
||||||
if cid is None:
|
if cid is None:
|
||||||
print(
|
print(
|
||||||
f"Row {table}:{rid} has unknown currency code '{code}' and create_missing=False; skipping")
|
f"Row {table}:{rid} has unknown currency code '{code}' and create_missing=False; skipping"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
if dry_run:
|
if dry_run:
|
||||||
print(
|
print(
|
||||||
f"[DRY RUN] Would set {table}.currency_id = {cid} for row id={rid} (code={code})")
|
f"[DRY RUN] Would set {table}.currency_id = {cid} for row id={rid} (code={code})"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
conn.execute(text(f"UPDATE {table} SET currency_id = :cid WHERE id = :rid"), {
|
conn.execute(
|
||||||
"cid": cid, "rid": rid})
|
text(
|
||||||
|
f"UPDATE {table} SET currency_id = :cid WHERE id = :rid"
|
||||||
|
),
|
||||||
|
{"cid": cid, "rid": rid},
|
||||||
|
)
|
||||||
changed += 1
|
changed += 1
|
||||||
|
|
||||||
print(f"{table}: processed, changed={changed} (dry_run={dry_run})")
|
print(f"{table}: processed, changed={changed} (dry_run={dry_run})")
|
||||||
@@ -101,11 +135,19 @@ def backfill(db_url: str, dry_run: bool = True, create_missing: bool = False) ->
|
|||||||
|
|
||||||
def main() -> None:
|
def main() -> None:
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Backfill currency_id from currency_code for capex/opex tables")
|
description="Backfill currency_id from currency_code for capex/opex tables"
|
||||||
parser.add_argument("--dry-run", action="store_true",
|
)
|
||||||
default=True, help="Show actions without writing")
|
parser.add_argument(
|
||||||
parser.add_argument("--create-missing", action="store_true",
|
"--dry-run",
|
||||||
help="Create missing currency rows in the currency table")
|
action="store_true",
|
||||||
|
default=True,
|
||||||
|
help="Show actions without writing",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--create-missing",
|
||||||
|
action="store_true",
|
||||||
|
help="Create missing currency rows in the currency table",
|
||||||
|
)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
db = load_database_url()
|
db = load_database_url()
|
||||||
|
|||||||
@@ -4,25 +4,30 @@ Checks only local file links (relative paths) and reports missing targets.
|
|||||||
|
|
||||||
Run from the repository root using the project's Python environment.
|
Run from the repository root using the project's Python environment.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
ROOT = Path(__file__).resolve().parent.parent
|
ROOT = Path(__file__).resolve().parent.parent
|
||||||
DOCS = ROOT / 'docs'
|
DOCS = ROOT / "docs"
|
||||||
|
|
||||||
MD_LINK_RE = re.compile(r"\[([^\]]+)\]\(([^)]+)\)")
|
MD_LINK_RE = re.compile(r"\[([^\]]+)\]\(([^)]+)\)")
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
for md in DOCS.rglob('*.md'):
|
for md in DOCS.rglob("*.md"):
|
||||||
text = md.read_text(encoding='utf-8')
|
text = md.read_text(encoding="utf-8")
|
||||||
for m in MD_LINK_RE.finditer(text):
|
for m in MD_LINK_RE.finditer(text):
|
||||||
label, target = m.groups()
|
label, target = m.groups()
|
||||||
# skip URLs
|
# skip URLs
|
||||||
if target.startswith('http://') or target.startswith('https://') or target.startswith('#'):
|
if (
|
||||||
|
target.startswith("http://")
|
||||||
|
or target.startswith("https://")
|
||||||
|
or target.startswith("#")
|
||||||
|
):
|
||||||
continue
|
continue
|
||||||
# strip anchors
|
# strip anchors
|
||||||
target_path = target.split('#')[0]
|
target_path = target.split("#")[0]
|
||||||
# if link is to a directory index, allow
|
# if link is to a directory index, allow
|
||||||
candidate = (md.parent / target_path).resolve()
|
candidate = (md.parent / target_path).resolve()
|
||||||
if candidate.exists():
|
if candidate.exists():
|
||||||
@@ -30,14 +35,16 @@ for md in DOCS.rglob('*.md'):
|
|||||||
# check common implicit index: target/ -> target/README.md or target/index.md
|
# check common implicit index: target/ -> target/README.md or target/index.md
|
||||||
candidate_dir = md.parent / target_path
|
candidate_dir = md.parent / target_path
|
||||||
if candidate_dir.is_dir():
|
if candidate_dir.is_dir():
|
||||||
if (candidate_dir / 'README.md').exists() or (candidate_dir / 'index.md').exists():
|
if (candidate_dir / "README.md").exists() or (
|
||||||
|
candidate_dir / "index.md"
|
||||||
|
).exists():
|
||||||
continue
|
continue
|
||||||
errors.append((str(md.relative_to(ROOT)), target, label))
|
errors.append((str(md.relative_to(ROOT)), target, label))
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
print('Broken local links found:')
|
print("Broken local links found:")
|
||||||
for src, tgt, label in errors:
|
for src, tgt, label in errors:
|
||||||
print(f'- {src} -> {tgt} ({label})')
|
print(f"- {src} -> {tgt} ({label})")
|
||||||
exit(2)
|
exit(2)
|
||||||
|
|
||||||
print('No broken local links detected.')
|
print("No broken local links detected.")
|
||||||
|
|||||||
@@ -2,16 +2,17 @@
|
|||||||
|
|
||||||
This is intentionally small and non-destructive; it touches only files under docs/ and makes safe changes.
|
This is intentionally small and non-destructive; it touches only files under docs/ and makes safe changes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
DOCS = Path(__file__).resolve().parents[1] / "docs"
|
DOCS = Path(__file__).resolve().parents[1] / "docs"
|
||||||
|
|
||||||
CODE_LANG_HINTS = {
|
CODE_LANG_HINTS = {
|
||||||
'powershell': ('powershell',),
|
"powershell": ("powershell",),
|
||||||
'bash': ('bash', 'sh'),
|
"bash": ("bash", "sh"),
|
||||||
'sql': ('sql',),
|
"sql": ("sql",),
|
||||||
'python': ('python',),
|
"python": ("python",),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -19,48 +20,60 @@ def add_code_fence_language(match):
|
|||||||
fence = match.group(0)
|
fence = match.group(0)
|
||||||
inner = match.group(1)
|
inner = match.group(1)
|
||||||
# If language already present, return unchanged
|
# If language already present, return unchanged
|
||||||
if fence.startswith('```') and len(fence.splitlines()[0].strip()) > 3:
|
if fence.startswith("```") and len(fence.splitlines()[0].strip()) > 3:
|
||||||
return fence
|
return fence
|
||||||
# Try to infer language from the code content
|
# Try to infer language from the code content
|
||||||
code = inner.strip().splitlines()[0] if inner.strip() else ''
|
code = inner.strip().splitlines()[0] if inner.strip() else ""
|
||||||
lang = ''
|
lang = ""
|
||||||
if code.startswith('$') or code.startswith('PS') or code.lower().startswith('powershell'):
|
if (
|
||||||
lang = 'powershell'
|
code.startswith("$")
|
||||||
elif code.startswith('#') or code.startswith('import') or code.startswith('from'):
|
or code.startswith("PS")
|
||||||
lang = 'python'
|
or code.lower().startswith("powershell")
|
||||||
elif re.match(r'^(select|insert|update|create)\b', code.strip(), re.I):
|
):
|
||||||
lang = 'sql'
|
lang = "powershell"
|
||||||
elif code.startswith('git') or code.startswith('./') or code.startswith('sudo'):
|
elif (
|
||||||
lang = 'bash'
|
code.startswith("#")
|
||||||
|
or code.startswith("import")
|
||||||
|
or code.startswith("from")
|
||||||
|
):
|
||||||
|
lang = "python"
|
||||||
|
elif re.match(r"^(select|insert|update|create)\b", code.strip(), re.I):
|
||||||
|
lang = "sql"
|
||||||
|
elif (
|
||||||
|
code.startswith("git")
|
||||||
|
or code.startswith("./")
|
||||||
|
or code.startswith("sudo")
|
||||||
|
):
|
||||||
|
lang = "bash"
|
||||||
if lang:
|
if lang:
|
||||||
return f'```{lang}\n{inner}\n```'
|
return f"```{lang}\n{inner}\n```"
|
||||||
return fence
|
return fence
|
||||||
|
|
||||||
|
|
||||||
def normalize_file(path: Path):
|
def normalize_file(path: Path):
|
||||||
text = path.read_text(encoding='utf-8')
|
text = path.read_text(encoding="utf-8")
|
||||||
orig = text
|
orig = text
|
||||||
# Trim trailing whitespace and ensure single trailing newline
|
# Trim trailing whitespace and ensure single trailing newline
|
||||||
text = '\n'.join(line.rstrip() for line in text.splitlines()) + '\n'
|
text = "\n".join(line.rstrip() for line in text.splitlines()) + "\n"
|
||||||
# Ensure first non-empty line is H1
|
# Ensure first non-empty line is H1
|
||||||
lines = text.splitlines()
|
lines = text.splitlines()
|
||||||
for i, ln in enumerate(lines):
|
for i, ln in enumerate(lines):
|
||||||
if ln.strip():
|
if ln.strip():
|
||||||
if not ln.startswith('#'):
|
if not ln.startswith("#"):
|
||||||
lines[i] = '# ' + ln
|
lines[i] = "# " + ln
|
||||||
break
|
break
|
||||||
text = '\n'.join(lines) + '\n'
|
text = "\n".join(lines) + "\n"
|
||||||
# Add basic code fence languages where missing (simple heuristic)
|
# Add basic code fence languages where missing (simple heuristic)
|
||||||
text = re.sub(r'```\n([\s\S]*?)\n```', add_code_fence_language, text)
|
text = re.sub(r"```\n([\s\S]*?)\n```", add_code_fence_language, text)
|
||||||
if text != orig:
|
if text != orig:
|
||||||
path.write_text(text, encoding='utf-8')
|
path.write_text(text, encoding="utf-8")
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
changed = []
|
changed = []
|
||||||
for p in DOCS.rglob('*.md'):
|
for p in DOCS.rglob("*.md"):
|
||||||
if p.is_file():
|
if p.is_file():
|
||||||
try:
|
try:
|
||||||
if normalize_file(p):
|
if normalize_file(p):
|
||||||
@@ -68,12 +81,12 @@ def main():
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Failed to format {p}: {e}")
|
print(f"Failed to format {p}: {e}")
|
||||||
if changed:
|
if changed:
|
||||||
print('Formatted files:')
|
print("Formatted files:")
|
||||||
for c in changed:
|
for c in changed:
|
||||||
print(' -', c)
|
print(" -", c)
|
||||||
else:
|
else:
|
||||||
print('No formatting changes required.')
|
print("No formatting changes required.")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -27,6 +27,25 @@ SET name = EXCLUDED.name,
|
|||||||
symbol = EXCLUDED.symbol,
|
symbol = EXCLUDED.symbol,
|
||||||
is_active = EXCLUDED.is_active;
|
is_active = EXCLUDED.is_active;
|
||||||
|
|
||||||
|
-- Application-level settings table
|
||||||
|
CREATE TABLE IF NOT EXISTS application_setting (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
key VARCHAR(128) NOT NULL UNIQUE,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
value_type VARCHAR(32) NOT NULL DEFAULT 'string',
|
||||||
|
category VARCHAR(32) NOT NULL DEFAULT 'general',
|
||||||
|
description TEXT,
|
||||||
|
is_editable BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS ux_application_setting_key
|
||||||
|
ON application_setting (key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_application_setting_category
|
||||||
|
ON application_setting (category);
|
||||||
|
|
||||||
-- Measurement unit reference table
|
-- Measurement unit reference table
|
||||||
CREATE TABLE IF NOT EXISTS measurement_unit (
|
CREATE TABLE IF NOT EXISTS measurement_unit (
|
||||||
id SERIAL PRIMARY KEY,
|
id SERIAL PRIMARY KEY,
|
||||||
|
|||||||
@@ -0,0 +1,25 @@
|
|||||||
|
-- Migration: Create application_setting table for configurable application options
|
||||||
|
-- Date: 2025-10-25
|
||||||
|
-- Description: Introduces persistent storage for application-level settings such as theme colors.
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS application_setting (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
key VARCHAR(128) NOT NULL UNIQUE,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
value_type VARCHAR(32) NOT NULL DEFAULT 'string',
|
||||||
|
category VARCHAR(32) NOT NULL DEFAULT 'general',
|
||||||
|
description TEXT,
|
||||||
|
is_editable BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS ux_application_setting_key
|
||||||
|
ON application_setting (key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_application_setting_category
|
||||||
|
ON application_setting (category);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
11
scripts/migrations/20251027_create_theme_settings_table.sql
Normal file
11
scripts/migrations/20251027_create_theme_settings_table.sql
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
-- Migration: 20251027_create_theme_settings_table.sql
|
||||||
|
|
||||||
|
CREATE TABLE theme_settings (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
theme_name VARCHAR(255) UNIQUE NOT NULL,
|
||||||
|
primary_color VARCHAR(7) NOT NULL,
|
||||||
|
secondary_color VARCHAR(7) NOT NULL,
|
||||||
|
accent_color VARCHAR(7) NOT NULL,
|
||||||
|
background_color VARCHAR(7) NOT NULL,
|
||||||
|
text_color VARCHAR(7) NOT NULL
|
||||||
|
);
|
||||||
15
scripts/migrations/20251027_create_user_and_role_tables.sql
Normal file
15
scripts/migrations/20251027_create_user_and_role_tables.sql
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
-- Migration: 20251027_create_user_and_role_tables.sql
|
||||||
|
|
||||||
|
CREATE TABLE roles (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(255) UNIQUE NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE users (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
username VARCHAR(255) UNIQUE NOT NULL,
|
||||||
|
email VARCHAR(255) UNIQUE NOT NULL,
|
||||||
|
hashed_password VARCHAR(255) NOT NULL,
|
||||||
|
role_id INTEGER NOT NULL,
|
||||||
|
FOREIGN KEY (role_id) REFERENCES roles(id)
|
||||||
|
);
|
||||||
@@ -47,22 +47,82 @@ MEASUREMENT_UNIT_SEEDS = (
|
|||||||
("kilowatt_hours", "Kilowatt Hours", "kWh", "energy", True),
|
("kilowatt_hours", "Kilowatt Hours", "kWh", "energy", True),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
THEME_SETTING_SEEDS = (
|
||||||
|
("--color-background", "#f4f5f7", "color",
|
||||||
|
"theme", "CSS variable --color-background", True),
|
||||||
|
("--color-surface", "#ffffff", "color",
|
||||||
|
"theme", "CSS variable --color-surface", True),
|
||||||
|
("--color-text-primary", "#2a1f33", "color",
|
||||||
|
"theme", "CSS variable --color-text-primary", True),
|
||||||
|
("--color-text-secondary", "#624769", "color",
|
||||||
|
"theme", "CSS variable --color-text-secondary", True),
|
||||||
|
("--color-text-muted", "#64748b", "color",
|
||||||
|
"theme", "CSS variable --color-text-muted", True),
|
||||||
|
("--color-text-subtle", "#94a3b8", "color",
|
||||||
|
"theme", "CSS variable --color-text-subtle", True),
|
||||||
|
("--color-text-invert", "#ffffff", "color",
|
||||||
|
"theme", "CSS variable --color-text-invert", True),
|
||||||
|
("--color-text-dark", "#0f172a", "color",
|
||||||
|
"theme", "CSS variable --color-text-dark", True),
|
||||||
|
("--color-text-strong", "#111827", "color",
|
||||||
|
"theme", "CSS variable --color-text-strong", True),
|
||||||
|
("--color-primary", "#5f320d", "color",
|
||||||
|
"theme", "CSS variable --color-primary", True),
|
||||||
|
("--color-primary-strong", "#7e4c13", "color",
|
||||||
|
"theme", "CSS variable --color-primary-strong", True),
|
||||||
|
("--color-primary-stronger", "#837c15", "color",
|
||||||
|
"theme", "CSS variable --color-primary-stronger", True),
|
||||||
|
("--color-accent", "#bff838", "color",
|
||||||
|
"theme", "CSS variable --color-accent", True),
|
||||||
|
("--color-border", "#e2e8f0", "color",
|
||||||
|
"theme", "CSS variable --color-border", True),
|
||||||
|
("--color-border-strong", "#cbd5e1", "color",
|
||||||
|
"theme", "CSS variable --color-border-strong", True),
|
||||||
|
("--color-highlight", "#eef2ff", "color",
|
||||||
|
"theme", "CSS variable --color-highlight", True),
|
||||||
|
("--color-panel-shadow", "rgba(15, 23, 42, 0.08)", "color",
|
||||||
|
"theme", "CSS variable --color-panel-shadow", True),
|
||||||
|
("--color-panel-shadow-deep", "rgba(15, 23, 42, 0.12)", "color",
|
||||||
|
"theme", "CSS variable --color-panel-shadow-deep", True),
|
||||||
|
("--color-surface-alt", "#f8fafc", "color",
|
||||||
|
"theme", "CSS variable --color-surface-alt", True),
|
||||||
|
("--color-success", "#047857", "color",
|
||||||
|
"theme", "CSS variable --color-success", True),
|
||||||
|
("--color-error", "#b91c1c", "color",
|
||||||
|
"theme", "CSS variable --color-error", True),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def parse_args() -> argparse.Namespace:
|
def parse_args() -> argparse.Namespace:
|
||||||
parser = argparse.ArgumentParser(description="Seed baseline CalMiner data")
|
parser = argparse.ArgumentParser(description="Seed baseline CalMiner data")
|
||||||
parser.add_argument("--currencies", action="store_true", help="Seed currency table")
|
|
||||||
parser.add_argument("--units", action="store_true", help="Seed unit table")
|
|
||||||
parser.add_argument("--defaults", action="store_true", help="Seed default records")
|
|
||||||
parser.add_argument("--dry-run", action="store_true", help="Print actions without executing")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--verbose", "-v", action="count", default=0, help="Increase logging verbosity"
|
"--currencies", action="store_true", help="Seed currency table"
|
||||||
|
)
|
||||||
|
parser.add_argument("--units", action="store_true", help="Seed unit table")
|
||||||
|
parser.add_argument(
|
||||||
|
"--theme", action="store_true", help="Seed theme settings"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--defaults", action="store_true", help="Seed default records"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry-run", action="store_true", help="Print actions without executing"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--verbose",
|
||||||
|
"-v",
|
||||||
|
action="count",
|
||||||
|
default=0,
|
||||||
|
help="Increase logging verbosity",
|
||||||
)
|
)
|
||||||
return parser.parse_args()
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
def _configure_logging(args: argparse.Namespace) -> None:
|
def _configure_logging(args: argparse.Namespace) -> None:
|
||||||
level = logging.WARNING - (10 * min(args.verbose, 2))
|
level = logging.WARNING - (10 * min(args.verbose, 2))
|
||||||
logging.basicConfig(level=max(level, logging.INFO), format="%(levelname)s %(message)s")
|
logging.basicConfig(
|
||||||
|
level=max(level, logging.INFO), format="%(levelname)s %(message)s"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
def main() -> None:
|
||||||
@@ -77,7 +137,7 @@ def run_with_namespace(
|
|||||||
) -> None:
|
) -> None:
|
||||||
_configure_logging(args)
|
_configure_logging(args)
|
||||||
|
|
||||||
if not any((args.currencies, args.units, args.defaults)):
|
if not any((args.currencies, args.units, args.theme, args.defaults)):
|
||||||
logger.info("No seeding options provided; exiting")
|
logger.info("No seeding options provided; exiting")
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -89,6 +149,8 @@ def run_with_namespace(
|
|||||||
_seed_currencies(cursor, dry_run=args.dry_run)
|
_seed_currencies(cursor, dry_run=args.dry_run)
|
||||||
if args.units:
|
if args.units:
|
||||||
_seed_units(cursor, dry_run=args.dry_run)
|
_seed_units(cursor, dry_run=args.dry_run)
|
||||||
|
if args.theme:
|
||||||
|
_seed_theme(cursor, dry_run=args.dry_run)
|
||||||
if args.defaults:
|
if args.defaults:
|
||||||
_seed_defaults(cursor, dry_run=args.dry_run)
|
_seed_defaults(cursor, dry_run=args.dry_run)
|
||||||
|
|
||||||
@@ -152,11 +214,44 @@ def _seed_units(cursor, *, dry_run: bool) -> None:
|
|||||||
logger.info("Measurement unit seed complete")
|
logger.info("Measurement unit seed complete")
|
||||||
|
|
||||||
|
|
||||||
def _seed_defaults(cursor, *, dry_run: bool) -> None:
|
def _seed_theme(cursor, *, dry_run: bool) -> None:
|
||||||
logger.info("Seeding default records - not yet implemented")
|
logger.info("Seeding theme settings (%d rows)", len(THEME_SETTING_SEEDS))
|
||||||
if dry_run:
|
if dry_run:
|
||||||
|
for key, value, _, _, _, _ in THEME_SETTING_SEEDS:
|
||||||
|
logger.info(
|
||||||
|
"Dry run: would upsert theme setting %s = %s", key, value)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
execute_values(
|
||||||
|
cursor,
|
||||||
|
"""
|
||||||
|
INSERT INTO application_setting (key, value, value_type, category, description, is_editable)
|
||||||
|
VALUES %s
|
||||||
|
ON CONFLICT (key) DO UPDATE
|
||||||
|
SET value = EXCLUDED.value,
|
||||||
|
value_type = EXCLUDED.value_type,
|
||||||
|
category = EXCLUDED.category,
|
||||||
|
description = EXCLUDED.description,
|
||||||
|
is_editable = EXCLUDED.is_editable
|
||||||
|
""",
|
||||||
|
THEME_SETTING_SEEDS,
|
||||||
|
)
|
||||||
|
except errors.UndefinedTable:
|
||||||
|
logger.warning(
|
||||||
|
"application_setting table does not exist; skipping theme seeding."
|
||||||
|
)
|
||||||
|
cursor.connection.rollback()
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Theme settings seed complete")
|
||||||
|
|
||||||
|
|
||||||
|
def _seed_defaults(cursor, *, dry_run: bool) -> None:
|
||||||
|
logger.info("Seeding default records")
|
||||||
|
_seed_theme(cursor, dry_run=dry_run)
|
||||||
|
logger.info("Default records seed complete")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
@@ -39,6 +39,7 @@ from psycopg2 import extensions
|
|||||||
from psycopg2.extensions import connection as PGConnection, parse_dsn
|
from psycopg2.extensions import connection as PGConnection, parse_dsn
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from sqlalchemy import create_engine, inspect
|
from sqlalchemy import create_engine, inspect
|
||||||
|
|
||||||
ROOT_DIR = Path(__file__).resolve().parents[1]
|
ROOT_DIR = Path(__file__).resolve().parents[1]
|
||||||
if str(ROOT_DIR) not in sys.path:
|
if str(ROOT_DIR) not in sys.path:
|
||||||
sys.path.insert(0, str(ROOT_DIR))
|
sys.path.insert(0, str(ROOT_DIR))
|
||||||
@@ -125,8 +126,7 @@ class DatabaseConfig:
|
|||||||
]
|
]
|
||||||
if missing:
|
if missing:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
"Missing required database configuration: " +
|
"Missing required database configuration: " + ", ".join(missing)
|
||||||
", ".join(missing)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
host = cast(str, host)
|
host = cast(str, host)
|
||||||
@@ -208,12 +208,17 @@ class DatabaseConfig:
|
|||||||
class DatabaseSetup:
|
class DatabaseSetup:
|
||||||
"""Encapsulates the full setup workflow."""
|
"""Encapsulates the full setup workflow."""
|
||||||
|
|
||||||
def __init__(self, config: DatabaseConfig, *, dry_run: bool = False) -> None:
|
def __init__(
|
||||||
|
self, config: DatabaseConfig, *, dry_run: bool = False
|
||||||
|
) -> None:
|
||||||
self.config = config
|
self.config = config
|
||||||
self.dry_run = dry_run
|
self.dry_run = dry_run
|
||||||
self._models_loaded = False
|
self._models_loaded = False
|
||||||
self._rollback_actions: list[tuple[str, Callable[[], None]]] = []
|
self._rollback_actions: list[tuple[str, Callable[[], None]]] = []
|
||||||
def _register_rollback(self, label: str, action: Callable[[], None]) -> None:
|
|
||||||
|
def _register_rollback(
|
||||||
|
self, label: str, action: Callable[[], None]
|
||||||
|
) -> None:
|
||||||
if self.dry_run:
|
if self.dry_run:
|
||||||
return
|
return
|
||||||
self._rollback_actions.append((label, action))
|
self._rollback_actions.append((label, action))
|
||||||
@@ -237,7 +242,6 @@ class DatabaseSetup:
|
|||||||
def clear_rollbacks(self) -> None:
|
def clear_rollbacks(self) -> None:
|
||||||
self._rollback_actions.clear()
|
self._rollback_actions.clear()
|
||||||
|
|
||||||
|
|
||||||
def _describe_connection(self, user: str, database: str) -> str:
|
def _describe_connection(self, user: str, database: str) -> str:
|
||||||
return f"{user}@{self.config.host}:{self.config.port}/{database}"
|
return f"{user}@{self.config.host}:{self.config.port}/{database}"
|
||||||
|
|
||||||
@@ -384,9 +388,9 @@ class DatabaseSetup:
|
|||||||
try:
|
try:
|
||||||
if self.config.password:
|
if self.config.password:
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("CREATE ROLE {} WITH LOGIN PASSWORD %s").format(
|
sql.SQL(
|
||||||
sql.Identifier(self.config.user)
|
"CREATE ROLE {} WITH LOGIN PASSWORD %s"
|
||||||
),
|
).format(sql.Identifier(self.config.user)),
|
||||||
(self.config.password,),
|
(self.config.password,),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
@@ -589,8 +593,7 @@ class DatabaseSetup:
|
|||||||
return psycopg2.connect(dsn)
|
return psycopg2.connect(dsn)
|
||||||
except psycopg2.Error as exc:
|
except psycopg2.Error as exc:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
"Unable to establish admin connection. "
|
"Unable to establish admin connection. " f"Target: {descriptor}"
|
||||||
f"Target: {descriptor}"
|
|
||||||
) from exc
|
) from exc
|
||||||
|
|
||||||
def _application_connection(self) -> PGConnection:
|
def _application_connection(self) -> PGConnection:
|
||||||
@@ -645,7 +648,9 @@ class DatabaseSetup:
|
|||||||
importlib.import_module(f"{package.__name__}.{module_info.name}")
|
importlib.import_module(f"{package.__name__}.{module_info.name}")
|
||||||
self._models_loaded = True
|
self._models_loaded = True
|
||||||
|
|
||||||
def run_migrations(self, migrations_dir: Optional[Path | str] = None) -> None:
|
def run_migrations(
|
||||||
|
self, migrations_dir: Optional[Path | str] = None
|
||||||
|
) -> None:
|
||||||
"""Execute pending SQL migrations in chronological order."""
|
"""Execute pending SQL migrations in chronological order."""
|
||||||
|
|
||||||
directory = (
|
directory = (
|
||||||
@@ -673,7 +678,8 @@ class DatabaseSetup:
|
|||||||
conn.autocommit = True
|
conn.autocommit = True
|
||||||
with conn.cursor() as cursor:
|
with conn.cursor() as cursor:
|
||||||
table_exists = self._migrations_table_exists(
|
table_exists = self._migrations_table_exists(
|
||||||
cursor, schema_name)
|
cursor, schema_name
|
||||||
|
)
|
||||||
if not table_exists:
|
if not table_exists:
|
||||||
if self.dry_run:
|
if self.dry_run:
|
||||||
logger.info(
|
logger.info(
|
||||||
@@ -692,12 +698,10 @@ class DatabaseSetup:
|
|||||||
applied = set()
|
applied = set()
|
||||||
else:
|
else:
|
||||||
applied = self._fetch_applied_migrations(
|
applied = self._fetch_applied_migrations(
|
||||||
cursor, schema_name)
|
cursor, schema_name
|
||||||
|
)
|
||||||
|
|
||||||
if (
|
if baseline_path.exists() and baseline_name not in applied:
|
||||||
baseline_path.exists()
|
|
||||||
and baseline_name not in applied
|
|
||||||
):
|
|
||||||
if self.dry_run:
|
if self.dry_run:
|
||||||
logger.info(
|
logger.info(
|
||||||
"Dry run: baseline migration '%s' pending; would apply and mark legacy files",
|
"Dry run: baseline migration '%s' pending; would apply and mark legacy files",
|
||||||
@@ -756,9 +760,7 @@ class DatabaseSetup:
|
|||||||
)
|
)
|
||||||
|
|
||||||
pending = [
|
pending = [
|
||||||
path
|
path for path in migration_files if path.name not in applied
|
||||||
for path in migration_files
|
|
||||||
if path.name not in applied
|
|
||||||
]
|
]
|
||||||
|
|
||||||
if not pending:
|
if not pending:
|
||||||
@@ -792,9 +794,7 @@ class DatabaseSetup:
|
|||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL(
|
sql.SQL(
|
||||||
"INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
|
"INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
|
||||||
).format(
|
).format(sql.Identifier(schema_name, MIGRATIONS_TABLE)),
|
||||||
sql.Identifier(schema_name, MIGRATIONS_TABLE)
|
|
||||||
),
|
|
||||||
(path.name,),
|
(path.name,),
|
||||||
)
|
)
|
||||||
return path.name
|
return path.name
|
||||||
@@ -820,9 +820,7 @@ class DatabaseSetup:
|
|||||||
"filename TEXT PRIMARY KEY,"
|
"filename TEXT PRIMARY KEY,"
|
||||||
"applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()"
|
"applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()"
|
||||||
")"
|
")"
|
||||||
).format(
|
).format(sql.Identifier(schema_name, MIGRATIONS_TABLE))
|
||||||
sql.Identifier(schema_name, MIGRATIONS_TABLE)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _fetch_applied_migrations(self, cursor, schema_name: str) -> set[str]:
|
def _fetch_applied_migrations(self, cursor, schema_name: str) -> set[str]:
|
||||||
@@ -974,7 +972,7 @@ class DatabaseSetup:
|
|||||||
(database,),
|
(database,),
|
||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("DROP DATABASE IF EXISTS {}" ).format(
|
sql.SQL("DROP DATABASE IF EXISTS {}").format(
|
||||||
sql.Identifier(database)
|
sql.Identifier(database)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -985,7 +983,7 @@ class DatabaseSetup:
|
|||||||
conn.autocommit = True
|
conn.autocommit = True
|
||||||
with conn.cursor() as cursor:
|
with conn.cursor() as cursor:
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("DROP ROLE IF EXISTS {}" ).format(
|
sql.SQL("DROP ROLE IF EXISTS {}").format(
|
||||||
sql.Identifier(role)
|
sql.Identifier(role)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -1000,27 +998,35 @@ class DatabaseSetup:
|
|||||||
conn.autocommit = True
|
conn.autocommit = True
|
||||||
with conn.cursor() as cursor:
|
with conn.cursor() as cursor:
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} FROM {}" ).format(
|
sql.SQL(
|
||||||
|
"REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} FROM {}"
|
||||||
|
).format(
|
||||||
sql.Identifier(schema_name),
|
sql.Identifier(schema_name),
|
||||||
sql.Identifier(self.config.user)
|
sql.Identifier(self.config.user),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} FROM {}" ).format(
|
sql.SQL(
|
||||||
|
"REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} FROM {}"
|
||||||
|
).format(
|
||||||
sql.Identifier(schema_name),
|
sql.Identifier(schema_name),
|
||||||
sql.Identifier(self.config.user)
|
sql.Identifier(self.config.user),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM {}" ).format(
|
sql.SQL(
|
||||||
|
"ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM {}"
|
||||||
|
).format(
|
||||||
sql.Identifier(schema_name),
|
sql.Identifier(schema_name),
|
||||||
sql.Identifier(self.config.user)
|
sql.Identifier(self.config.user),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE USAGE, SELECT ON SEQUENCES FROM {}" ).format(
|
sql.SQL(
|
||||||
|
"ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE USAGE, SELECT ON SEQUENCES FROM {}"
|
||||||
|
).format(
|
||||||
sql.Identifier(schema_name),
|
sql.Identifier(schema_name),
|
||||||
sql.Identifier(self.config.user)
|
sql.Identifier(self.config.user),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1064,19 +1070,18 @@ def parse_args() -> argparse.Namespace:
|
|||||||
)
|
)
|
||||||
parser.add_argument("--db-driver", help="Override DATABASE_DRIVER")
|
parser.add_argument("--db-driver", help="Override DATABASE_DRIVER")
|
||||||
parser.add_argument("--db-host", help="Override DATABASE_HOST")
|
parser.add_argument("--db-host", help="Override DATABASE_HOST")
|
||||||
parser.add_argument("--db-port", type=int,
|
parser.add_argument("--db-port", type=int, help="Override DATABASE_PORT")
|
||||||
help="Override DATABASE_PORT")
|
|
||||||
parser.add_argument("--db-name", help="Override DATABASE_NAME")
|
parser.add_argument("--db-name", help="Override DATABASE_NAME")
|
||||||
parser.add_argument("--db-user", help="Override DATABASE_USER")
|
parser.add_argument("--db-user", help="Override DATABASE_USER")
|
||||||
parser.add_argument(
|
parser.add_argument("--db-password", help="Override DATABASE_PASSWORD")
|
||||||
"--db-password", help="Override DATABASE_PASSWORD")
|
|
||||||
parser.add_argument("--db-schema", help="Override DATABASE_SCHEMA")
|
parser.add_argument("--db-schema", help="Override DATABASE_SCHEMA")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--admin-url",
|
"--admin-url",
|
||||||
help="Override DATABASE_ADMIN_URL for administrative operations",
|
help="Override DATABASE_ADMIN_URL for administrative operations",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--admin-user", help="Override DATABASE_SUPERUSER for admin ops")
|
"--admin-user", help="Override DATABASE_SUPERUSER for admin ops"
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--admin-password",
|
"--admin-password",
|
||||||
help="Override DATABASE_SUPERUSER_PASSWORD for admin ops",
|
help="Override DATABASE_SUPERUSER_PASSWORD for admin ops",
|
||||||
@@ -1091,7 +1096,11 @@ def parse_args() -> argparse.Namespace:
|
|||||||
help="Log actions without applying changes.",
|
help="Log actions without applying changes.",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--verbose", "-v", action="count", default=0, help="Increase logging verbosity"
|
"--verbose",
|
||||||
|
"-v",
|
||||||
|
action="count",
|
||||||
|
default=0,
|
||||||
|
help="Increase logging verbosity",
|
||||||
)
|
)
|
||||||
return parser.parse_args()
|
return parser.parse_args()
|
||||||
|
|
||||||
@@ -1099,8 +1108,9 @@ def parse_args() -> argparse.Namespace:
|
|||||||
def main() -> None:
|
def main() -> None:
|
||||||
args = parse_args()
|
args = parse_args()
|
||||||
level = logging.WARNING - (10 * min(args.verbose, 2))
|
level = logging.WARNING - (10 * min(args.verbose, 2))
|
||||||
logging.basicConfig(level=max(level, logging.INFO),
|
logging.basicConfig(
|
||||||
format="%(levelname)s %(message)s")
|
level=max(level, logging.INFO), format="%(levelname)s %(message)s"
|
||||||
|
)
|
||||||
|
|
||||||
override_args: dict[str, Optional[str]] = {
|
override_args: dict[str, Optional[str]] = {
|
||||||
"DATABASE_DRIVER": args.db_driver,
|
"DATABASE_DRIVER": args.db_driver,
|
||||||
@@ -1120,7 +1130,9 @@ def main() -> None:
|
|||||||
config = DatabaseConfig.from_env(overrides=override_args)
|
config = DatabaseConfig.from_env(overrides=override_args)
|
||||||
setup = DatabaseSetup(config, dry_run=args.dry_run)
|
setup = DatabaseSetup(config, dry_run=args.dry_run)
|
||||||
|
|
||||||
admin_tasks_requested = args.ensure_database or args.ensure_role or args.ensure_schema
|
admin_tasks_requested = (
|
||||||
|
args.ensure_database or args.ensure_role or args.ensure_schema
|
||||||
|
)
|
||||||
if admin_tasks_requested:
|
if admin_tasks_requested:
|
||||||
setup.validate_admin_connection()
|
setup.validate_admin_connection()
|
||||||
|
|
||||||
@@ -1141,6 +1153,12 @@ def main() -> None:
|
|||||||
app_validated = True
|
app_validated = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
should_run_migrations = args.run_migrations
|
||||||
|
auto_run_migrations_reason: Optional[str] = None
|
||||||
|
if args.seed_data and not should_run_migrations:
|
||||||
|
should_run_migrations = True
|
||||||
|
auto_run_migrations_reason = "Seed data requested without explicit --run-migrations; applying migrations first."
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if args.ensure_database:
|
if args.ensure_database:
|
||||||
setup.ensure_database()
|
setup.ensure_database()
|
||||||
@@ -1154,12 +1172,12 @@ def main() -> None:
|
|||||||
"SQLAlchemy schema initialization"
|
"SQLAlchemy schema initialization"
|
||||||
):
|
):
|
||||||
setup.initialize_schema()
|
setup.initialize_schema()
|
||||||
if args.run_migrations:
|
if should_run_migrations:
|
||||||
if ensure_application_connection_for("migration execution"):
|
if ensure_application_connection_for("migration execution"):
|
||||||
|
if auto_run_migrations_reason:
|
||||||
|
logger.info(auto_run_migrations_reason)
|
||||||
migrations_path = (
|
migrations_path = (
|
||||||
Path(args.migrations_dir)
|
Path(args.migrations_dir) if args.migrations_dir else None
|
||||||
if args.migrations_dir
|
|
||||||
else None
|
|
||||||
)
|
)
|
||||||
setup.run_migrations(migrations_path)
|
setup.run_migrations(migrations_path)
|
||||||
if args.seed_data:
|
if args.seed_data:
|
||||||
|
|||||||
@@ -27,7 +27,9 @@ def _percentile(values: List[float], percentile: float) -> float:
|
|||||||
return sorted_values[lower] * (1 - weight) + sorted_values[upper] * weight
|
return sorted_values[lower] * (1 - weight) + sorted_values[upper] * weight
|
||||||
|
|
||||||
|
|
||||||
def generate_report(simulation_results: List[Dict[str, float]]) -> Dict[str, Union[float, int]]:
|
def generate_report(
|
||||||
|
simulation_results: List[Dict[str, float]],
|
||||||
|
) -> Dict[str, Union[float, int]]:
|
||||||
"""Aggregate basic statistics for simulation outputs."""
|
"""Aggregate basic statistics for simulation outputs."""
|
||||||
|
|
||||||
values = _extract_results(simulation_results)
|
values = _extract_results(simulation_results)
|
||||||
@@ -63,7 +65,7 @@ def generate_report(simulation_results: List[Dict[str, float]]) -> Dict[str, Uni
|
|||||||
|
|
||||||
std_dev = pstdev(values) if len(values) > 1 else 0.0
|
std_dev = pstdev(values) if len(values) > 1 else 0.0
|
||||||
summary["std_dev"] = std_dev
|
summary["std_dev"] = std_dev
|
||||||
summary["variance"] = std_dev ** 2
|
summary["variance"] = std_dev**2
|
||||||
|
|
||||||
var_95 = summary["percentile_5"]
|
var_95 = summary["percentile_5"]
|
||||||
summary["value_at_risk_95"] = var_95
|
summary["value_at_risk_95"] = var_95
|
||||||
|
|||||||
32
services/security.py
Normal file
32
services/security.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
from jose import jwt
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES = 30
|
||||||
|
SECRET_KEY = "your-secret-key" # Change this in production
|
||||||
|
ALGORITHM = "HS256"
|
||||||
|
|
||||||
|
pwd_context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")
|
||||||
|
|
||||||
|
|
||||||
|
def create_access_token(
|
||||||
|
subject: Union[str, Any], expires_delta: Union[timedelta, None] = None
|
||||||
|
) -> str:
|
||||||
|
if expires_delta:
|
||||||
|
expire = datetime.utcnow() + expires_delta
|
||||||
|
else:
|
||||||
|
expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||||
|
to_encode = {"exp": expire, "sub": str(subject)}
|
||||||
|
encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
||||||
|
return encoded_jwt
|
||||||
|
|
||||||
|
|
||||||
|
def verify_password(plain_password: str, hashed_password: str) -> bool:
|
||||||
|
return pwd_context.verify(plain_password, hashed_password)
|
||||||
|
|
||||||
|
|
||||||
|
def get_password_hash(password: str) -> str:
|
||||||
|
return pwd_context.hash(password)
|
||||||
230
services/settings.py
Normal file
230
services/settings.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from typing import Dict, Mapping
|
||||||
|
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from models.application_setting import ApplicationSetting
|
||||||
|
from models.theme_setting import ThemeSetting # Import ThemeSetting model
|
||||||
|
|
||||||
|
CSS_COLOR_CATEGORY = "theme"
|
||||||
|
CSS_COLOR_VALUE_TYPE = "color"
|
||||||
|
CSS_ENV_PREFIX = "CALMINER_THEME_"
|
||||||
|
|
||||||
|
CSS_COLOR_DEFAULTS: Dict[str, str] = {
|
||||||
|
"--color-background": "#f4f5f7",
|
||||||
|
"--color-surface": "#ffffff",
|
||||||
|
"--color-text-primary": "#2a1f33",
|
||||||
|
"--color-text-secondary": "#624769",
|
||||||
|
"--color-text-muted": "#64748b",
|
||||||
|
"--color-text-subtle": "#94a3b8",
|
||||||
|
"--color-text-invert": "#ffffff",
|
||||||
|
"--color-text-dark": "#0f172a",
|
||||||
|
"--color-text-strong": "#111827",
|
||||||
|
"--color-primary": "#5f320d",
|
||||||
|
"--color-primary-strong": "#7e4c13",
|
||||||
|
"--color-primary-stronger": "#837c15",
|
||||||
|
"--color-accent": "#bff838",
|
||||||
|
"--color-border": "#e2e8f0",
|
||||||
|
"--color-border-strong": "#cbd5e1",
|
||||||
|
"--color-highlight": "#eef2ff",
|
||||||
|
"--color-panel-shadow": "rgba(15, 23, 42, 0.08)",
|
||||||
|
"--color-panel-shadow-deep": "rgba(15, 23, 42, 0.12)",
|
||||||
|
"--color-surface-alt": "#f8fafc",
|
||||||
|
"--color-success": "#047857",
|
||||||
|
"--color-error": "#b91c1c",
|
||||||
|
}
|
||||||
|
|
||||||
|
_COLOR_VALUE_PATTERN = re.compile(
|
||||||
|
r"^(#([0-9a-fA-F]{3}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})|rgba?\([^)]+\)|hsla?\([^)]+\))$",
|
||||||
|
re.IGNORECASE,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_css_color_settings(db: Session) -> Dict[str, ApplicationSetting]:
|
||||||
|
"""Ensure the CSS color defaults exist in the settings table."""
|
||||||
|
|
||||||
|
existing = (
|
||||||
|
db.query(ApplicationSetting)
|
||||||
|
.filter(ApplicationSetting.key.in_(CSS_COLOR_DEFAULTS.keys()))
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
by_key = {setting.key: setting for setting in existing}
|
||||||
|
|
||||||
|
created = False
|
||||||
|
for key, default_value in CSS_COLOR_DEFAULTS.items():
|
||||||
|
if key in by_key:
|
||||||
|
continue
|
||||||
|
setting = ApplicationSetting(
|
||||||
|
key=key,
|
||||||
|
value=default_value,
|
||||||
|
value_type=CSS_COLOR_VALUE_TYPE,
|
||||||
|
category=CSS_COLOR_CATEGORY,
|
||||||
|
description=f"CSS variable {key}",
|
||||||
|
is_editable=True,
|
||||||
|
)
|
||||||
|
db.add(setting)
|
||||||
|
by_key[key] = setting
|
||||||
|
created = True
|
||||||
|
|
||||||
|
if created:
|
||||||
|
db.commit()
|
||||||
|
for key, setting in by_key.items():
|
||||||
|
db.refresh(setting)
|
||||||
|
|
||||||
|
return by_key
|
||||||
|
|
||||||
|
|
||||||
|
def get_css_color_settings(db: Session) -> Dict[str, str]:
|
||||||
|
"""Return CSS color variables, filling missing values with defaults."""
|
||||||
|
|
||||||
|
settings = ensure_css_color_settings(db)
|
||||||
|
values: Dict[str, str] = {
|
||||||
|
key: settings[key].value if key in settings else default
|
||||||
|
for key, default in CSS_COLOR_DEFAULTS.items()
|
||||||
|
}
|
||||||
|
|
||||||
|
env_overrides = read_css_color_env_overrides(os.environ)
|
||||||
|
if env_overrides:
|
||||||
|
values.update(env_overrides)
|
||||||
|
|
||||||
|
return values
|
||||||
|
|
||||||
|
|
||||||
|
def update_css_color_settings(
|
||||||
|
db: Session, updates: Mapping[str, str]
|
||||||
|
) -> Dict[str, str]:
|
||||||
|
"""Persist provided CSS color overrides and return the final values."""
|
||||||
|
|
||||||
|
if not updates:
|
||||||
|
return get_css_color_settings(db)
|
||||||
|
|
||||||
|
invalid_keys = sorted(set(updates.keys()) - set(CSS_COLOR_DEFAULTS.keys()))
|
||||||
|
if invalid_keys:
|
||||||
|
invalid_list = ", ".join(invalid_keys)
|
||||||
|
raise ValueError(f"Unsupported CSS variables: {invalid_list}")
|
||||||
|
|
||||||
|
normalized: Dict[str, str] = {}
|
||||||
|
for key, value in updates.items():
|
||||||
|
normalized[key] = _normalize_color_value(value)
|
||||||
|
|
||||||
|
settings = ensure_css_color_settings(db)
|
||||||
|
changed = False
|
||||||
|
|
||||||
|
for key, value in normalized.items():
|
||||||
|
setting = settings[key]
|
||||||
|
if setting.value != value:
|
||||||
|
setting.value = value
|
||||||
|
changed = True
|
||||||
|
if setting.value_type != CSS_COLOR_VALUE_TYPE:
|
||||||
|
setting.value_type = CSS_COLOR_VALUE_TYPE
|
||||||
|
changed = True
|
||||||
|
if setting.category != CSS_COLOR_CATEGORY:
|
||||||
|
setting.category = CSS_COLOR_CATEGORY
|
||||||
|
changed = True
|
||||||
|
if not setting.is_editable:
|
||||||
|
setting.is_editable = True
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
if changed:
|
||||||
|
db.commit()
|
||||||
|
for key in normalized.keys():
|
||||||
|
db.refresh(settings[key])
|
||||||
|
|
||||||
|
return get_css_color_settings(db)
|
||||||
|
|
||||||
|
|
||||||
|
def read_css_color_env_overrides(
|
||||||
|
env: Mapping[str, str] | None = None,
|
||||||
|
) -> Dict[str, str]:
|
||||||
|
"""Return validated CSS overrides sourced from environment variables."""
|
||||||
|
|
||||||
|
if env is None:
|
||||||
|
env = os.environ
|
||||||
|
|
||||||
|
overrides: Dict[str, str] = {}
|
||||||
|
for css_key in CSS_COLOR_DEFAULTS.keys():
|
||||||
|
env_name = css_key_to_env_var(css_key)
|
||||||
|
raw_value = env.get(env_name)
|
||||||
|
if raw_value is None:
|
||||||
|
continue
|
||||||
|
overrides[css_key] = _normalize_color_value(raw_value)
|
||||||
|
|
||||||
|
return overrides
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_color_value(value: str) -> str:
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError("Color value must be a string")
|
||||||
|
trimmed = value.strip()
|
||||||
|
if not trimmed:
|
||||||
|
raise ValueError("Color value cannot be empty")
|
||||||
|
if not _COLOR_VALUE_PATTERN.match(trimmed):
|
||||||
|
raise ValueError(
|
||||||
|
"Color value must be a hex code or an rgb/rgba/hsl/hsla expression"
|
||||||
|
)
|
||||||
|
_validate_functional_color(trimmed)
|
||||||
|
return trimmed
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_functional_color(value: str) -> None:
|
||||||
|
lowered = value.lower()
|
||||||
|
if lowered.startswith("rgb(") or lowered.startswith("hsl("):
|
||||||
|
_ensure_component_count(value, expected=3)
|
||||||
|
elif lowered.startswith("rgba(") or lowered.startswith("hsla("):
|
||||||
|
_ensure_component_count(value, expected=4)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_component_count(value: str, expected: int) -> None:
|
||||||
|
if not value.endswith(")"):
|
||||||
|
raise ValueError(
|
||||||
|
"Color function expressions must end with a closing parenthesis"
|
||||||
|
)
|
||||||
|
inner = value[value.index("(") + 1: -1]
|
||||||
|
parts = [segment.strip() for segment in inner.split(",")]
|
||||||
|
if len(parts) != expected:
|
||||||
|
raise ValueError(
|
||||||
|
"Color function expressions must provide the expected number of components"
|
||||||
|
)
|
||||||
|
if any(not component for component in parts):
|
||||||
|
raise ValueError("Color function components cannot be empty")
|
||||||
|
|
||||||
|
|
||||||
|
def css_key_to_env_var(css_key: str) -> str:
|
||||||
|
sanitized = css_key.lstrip("-").replace("-", "_").upper()
|
||||||
|
return f"{CSS_ENV_PREFIX}{sanitized}"
|
||||||
|
|
||||||
|
|
||||||
|
def list_css_env_override_rows(
|
||||||
|
env: Mapping[str, str] | None = None,
|
||||||
|
) -> list[Dict[str, str]]:
|
||||||
|
overrides = read_css_color_env_overrides(env)
|
||||||
|
rows: list[Dict[str, str]] = []
|
||||||
|
for css_key, value in overrides.items():
|
||||||
|
rows.append(
|
||||||
|
{
|
||||||
|
"css_key": css_key,
|
||||||
|
"env_var": css_key_to_env_var(css_key),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return rows
|
||||||
|
|
||||||
|
|
||||||
|
def save_theme_settings(db: Session, theme_data: dict):
|
||||||
|
theme = db.query(ThemeSetting).first() or ThemeSetting()
|
||||||
|
for key, value in theme_data.items():
|
||||||
|
setattr(theme, key, value)
|
||||||
|
db.add(theme)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(theme)
|
||||||
|
return theme
|
||||||
|
|
||||||
|
|
||||||
|
def get_theme_settings(db: Session):
|
||||||
|
theme = db.query(ThemeSetting).first()
|
||||||
|
if theme:
|
||||||
|
return {c.name: getattr(theme, c.name) for c in theme.__table__.columns}
|
||||||
|
return {}
|
||||||
@@ -25,12 +25,13 @@ def _ensure_positive_span(span: float, fallback: float) -> float:
|
|||||||
return span if span and span > 0 else fallback
|
return span if span and span > 0 else fallback
|
||||||
|
|
||||||
|
|
||||||
def _compile_parameters(parameters: Sequence[Dict[str, float]]) -> List[SimulationParameter]:
|
def _compile_parameters(
|
||||||
|
parameters: Sequence[Dict[str, float]],
|
||||||
|
) -> List[SimulationParameter]:
|
||||||
compiled: List[SimulationParameter] = []
|
compiled: List[SimulationParameter] = []
|
||||||
for index, item in enumerate(parameters):
|
for index, item in enumerate(parameters):
|
||||||
if "value" not in item:
|
if "value" not in item:
|
||||||
raise ValueError(
|
raise ValueError(f"Parameter at index {index} must include 'value'")
|
||||||
f"Parameter at index {index} must include 'value'")
|
|
||||||
name = str(item.get("name", f"param_{index}"))
|
name = str(item.get("name", f"param_{index}"))
|
||||||
base_value = float(item["value"])
|
base_value = float(item["value"])
|
||||||
distribution = str(item.get("distribution", "normal")).lower()
|
distribution = str(item.get("distribution", "normal")).lower()
|
||||||
@@ -43,8 +44,11 @@ def _compile_parameters(parameters: Sequence[Dict[str, float]]) -> List[Simulati
|
|||||||
|
|
||||||
if distribution == "normal":
|
if distribution == "normal":
|
||||||
std_dev = item.get("std_dev")
|
std_dev = item.get("std_dev")
|
||||||
std_dev_value = float(std_dev) if std_dev is not None else abs(
|
std_dev_value = (
|
||||||
base_value) * DEFAULT_STD_DEV_RATIO or 1.0
|
float(std_dev)
|
||||||
|
if std_dev is not None
|
||||||
|
else abs(base_value) * DEFAULT_STD_DEV_RATIO or 1.0
|
||||||
|
)
|
||||||
compiled.append(
|
compiled.append(
|
||||||
SimulationParameter(
|
SimulationParameter(
|
||||||
name=name,
|
name=name,
|
||||||
|
|||||||
@@ -117,6 +117,37 @@ body {
|
|||||||
gap: 0.5rem;
|
gap: 0.5rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.sidebar-section {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.35rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-section + .sidebar-section {
|
||||||
|
margin-top: 1.4rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-section-label {
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 600;
|
||||||
|
letter-spacing: 0.06em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: rgba(255, 255, 255, 0.52);
|
||||||
|
padding: 0 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-section-links {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.25rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-link-block {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
.sidebar-link {
|
.sidebar-link {
|
||||||
display: inline-flex;
|
display: inline-flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
@@ -142,6 +173,39 @@ body {
|
|||||||
box-shadow: inset 0 0 0 1px rgba(255, 255, 255, 0.25);
|
box-shadow: inset 0 0 0 1px rgba(255, 255, 255, 0.25);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.sidebar-sublinks {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.2rem;
|
||||||
|
padding-left: 1.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-sublink {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.5rem;
|
||||||
|
color: rgba(255, 255, 255, 0.74);
|
||||||
|
font-weight: 500;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
text-decoration: none;
|
||||||
|
padding: 0.35rem 0.75rem;
|
||||||
|
border-radius: 8px;
|
||||||
|
transition: background 0.2s ease, color 0.2s ease, transform 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-sublink:hover,
|
||||||
|
.sidebar-sublink:focus {
|
||||||
|
background: rgba(148, 197, 255, 0.18);
|
||||||
|
color: var(--color-text-invert);
|
||||||
|
transform: translateX(3px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-sublink.is-active {
|
||||||
|
background: rgba(148, 197, 255, 0.28);
|
||||||
|
color: var(--color-text-invert);
|
||||||
|
box-shadow: inset 0 0 0 1px rgba(255, 255, 255, 0.18);
|
||||||
|
}
|
||||||
|
|
||||||
.app-main {
|
.app-main {
|
||||||
background-color: var(--color-background);
|
background-color: var(--color-background);
|
||||||
display: flex;
|
display: flex;
|
||||||
@@ -185,6 +249,159 @@ body {
|
|||||||
align-items: center;
|
align-items: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.page-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: flex-start;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 1.5rem;
|
||||||
|
margin-bottom: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.page-subtitle {
|
||||||
|
margin-top: 0.35rem;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
font-size: 0.95rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
|
||||||
|
gap: 1.5rem;
|
||||||
|
margin-bottom: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-card {
|
||||||
|
background: var(--color-surface);
|
||||||
|
border-radius: 12px;
|
||||||
|
padding: 1.5rem;
|
||||||
|
box-shadow: 0 4px 14px var(--color-panel-shadow);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-card h2 {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-card p {
|
||||||
|
margin: 0;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-card-note {
|
||||||
|
font-size: 0.85rem;
|
||||||
|
color: var(--color-text-subtle);
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-form-grid {
|
||||||
|
max-width: none;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-form-field {
|
||||||
|
background: var(--color-surface-alt);
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: 10px;
|
||||||
|
padding: var(--space-sm);
|
||||||
|
box-shadow: inset 0 1px 2px rgba(15, 23, 42, 0.08);
|
||||||
|
gap: var(--space-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-form-field.is-env-override {
|
||||||
|
background: rgba(191, 248, 56, 0.12);
|
||||||
|
border-color: var(--color-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-field-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: var(--space-sm);
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--color-text-strong);
|
||||||
|
font-family: "Fira Code", "Consolas", "Courier New", monospace;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-field-default {
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-field-helper {
|
||||||
|
font-size: 0.8rem;
|
||||||
|
color: var(--color-text-subtle);
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-env-flag {
|
||||||
|
font-size: 0.78rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--color-accent);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-input-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-value-input {
|
||||||
|
font-family: "Fira Code", "Consolas", "Courier New", monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-value-input[disabled] {
|
||||||
|
background-color: rgba(148, 197, 255, 0.16);
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.color-preview {
|
||||||
|
width: 32px;
|
||||||
|
height: 32px;
|
||||||
|
border-radius: 8px;
|
||||||
|
border: 1px solid var(--color-border-strong);
|
||||||
|
box-shadow: inset 0 0 0 1px rgba(15, 23, 42, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.env-overrides-table table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
|
||||||
|
.env-overrides-table th,
|
||||||
|
.env-overrides-table td {
|
||||||
|
padding: 0.65rem 0.75rem;
|
||||||
|
text-align: left;
|
||||||
|
border-bottom: 1px solid var(--color-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.env-overrides-table code {
|
||||||
|
font-family: "Fira Code", "Consolas", "Courier New", monospace;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.button-link {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
width: fit-content;
|
||||||
|
padding: 0.55rem 1.2rem;
|
||||||
|
border-radius: 999px;
|
||||||
|
font-weight: 600;
|
||||||
|
text-decoration: none;
|
||||||
|
background: var(--color-primary);
|
||||||
|
color: var(--color-text-invert);
|
||||||
|
transition: transform 0.2s ease, box-shadow 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.button-link:hover,
|
||||||
|
.button-link:focus {
|
||||||
|
transform: translateY(-1px);
|
||||||
|
box-shadow: 0 8px 18px var(--color-panel-shadow);
|
||||||
|
}
|
||||||
|
|
||||||
.dashboard-metrics-grid {
|
.dashboard-metrics-grid {
|
||||||
display: grid;
|
display: grid;
|
||||||
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
|
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
|
||||||
|
|||||||
200
static/js/settings.js
Normal file
200
static/js/settings.js
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
(function () {
|
||||||
|
const dataScript = document.getElementById("theme-settings-data");
|
||||||
|
const form = document.getElementById("theme-settings-form");
|
||||||
|
const feedbackEl = document.getElementById("theme-settings-feedback");
|
||||||
|
const resetBtn = document.getElementById("theme-settings-reset");
|
||||||
|
const panel = document.getElementById("theme-settings");
|
||||||
|
|
||||||
|
if (!dataScript || !form || !feedbackEl || !panel) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const apiUrl = panel.getAttribute("data-api");
|
||||||
|
if (!apiUrl) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = JSON.parse(dataScript.textContent || "{}");
|
||||||
|
const currentValues = { ...(parsed.variables || {}) };
|
||||||
|
const defaultValues = parsed.defaults || {};
|
||||||
|
let envOverrides = { ...(parsed.envOverrides || {}) };
|
||||||
|
|
||||||
|
const previewElements = new Map();
|
||||||
|
const inputs = Array.from(form.querySelectorAll(".color-value-input"));
|
||||||
|
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
const field = input.closest(".color-form-field");
|
||||||
|
const preview = field ? field.querySelector(".color-preview") : null;
|
||||||
|
if (preview) {
|
||||||
|
previewElements.set(input, preview);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.prototype.hasOwnProperty.call(envOverrides, key)) {
|
||||||
|
const overrideValue = envOverrides[key];
|
||||||
|
input.value = overrideValue;
|
||||||
|
input.disabled = true;
|
||||||
|
input.setAttribute("aria-disabled", "true");
|
||||||
|
input.dataset.envOverride = "true";
|
||||||
|
if (field) {
|
||||||
|
field.classList.add("is-env-override");
|
||||||
|
}
|
||||||
|
if (preview) {
|
||||||
|
preview.style.background = overrideValue;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
input.addEventListener("input", () => {
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = input.value || defaultValues[key] || "";
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
function setFeedback(message, type) {
|
||||||
|
feedbackEl.textContent = message;
|
||||||
|
feedbackEl.classList.remove("hidden", "success", "error");
|
||||||
|
if (type) {
|
||||||
|
feedbackEl.classList.add(type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearFeedback() {
|
||||||
|
feedbackEl.textContent = "";
|
||||||
|
feedbackEl.classList.add("hidden");
|
||||||
|
feedbackEl.classList.remove("success", "error");
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateRootVariables(values) {
|
||||||
|
if (!values) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const root = document.documentElement;
|
||||||
|
Object.entries(values).forEach(([key, value]) => {
|
||||||
|
if (typeof key === "string" && typeof value === "string") {
|
||||||
|
root.style.setProperty(key, value);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function resetTo(source) {
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
if (input.disabled) {
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
const fallback = envOverrides[key] || currentValues[key];
|
||||||
|
if (previewEl && fallback) {
|
||||||
|
previewEl.style.background = fallback;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
||||||
|
input.value = source[key];
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = source[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize previews to current values after page load.
|
||||||
|
resetTo(currentValues);
|
||||||
|
|
||||||
|
resetBtn?.addEventListener("click", () => {
|
||||||
|
resetTo(defaultValues);
|
||||||
|
clearFeedback();
|
||||||
|
setFeedback("Reverted to default values. Submit to save.", "success");
|
||||||
|
});
|
||||||
|
|
||||||
|
form.addEventListener("submit", async (event) => {
|
||||||
|
event.preventDefault();
|
||||||
|
clearFeedback();
|
||||||
|
|
||||||
|
const payload = {};
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
if (input.disabled) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
payload[input.name] = input.value.trim();
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(apiUrl, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ variables: payload }),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
let detail = "Unable to save theme settings.";
|
||||||
|
try {
|
||||||
|
const errorData = await response.json();
|
||||||
|
if (errorData?.detail) {
|
||||||
|
detail = Array.isArray(errorData.detail)
|
||||||
|
? errorData.detail.map((item) => item.msg || item).join("; ")
|
||||||
|
: errorData.detail;
|
||||||
|
}
|
||||||
|
} catch (parseError) {
|
||||||
|
// Ignore JSON parse errors and use default detail message.
|
||||||
|
}
|
||||||
|
setFeedback(detail, "error");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
const variables = data?.variables || {};
|
||||||
|
const responseOverrides = data?.env_overrides || {};
|
||||||
|
|
||||||
|
Object.assign(currentValues, variables);
|
||||||
|
envOverrides = { ...responseOverrides };
|
||||||
|
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
const field = input.closest(".color-form-field");
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
const isOverride = Object.prototype.hasOwnProperty.call(
|
||||||
|
envOverrides,
|
||||||
|
key,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (isOverride) {
|
||||||
|
const overrideValue = envOverrides[key];
|
||||||
|
input.value = overrideValue;
|
||||||
|
if (!input.disabled) {
|
||||||
|
input.disabled = true;
|
||||||
|
input.setAttribute("aria-disabled", "true");
|
||||||
|
}
|
||||||
|
if (field) {
|
||||||
|
field.classList.add("is-env-override");
|
||||||
|
}
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = overrideValue;
|
||||||
|
}
|
||||||
|
} else if (input.disabled) {
|
||||||
|
input.disabled = false;
|
||||||
|
input.removeAttribute("aria-disabled");
|
||||||
|
if (field) {
|
||||||
|
field.classList.remove("is-env-override");
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
previewEl &&
|
||||||
|
Object.prototype.hasOwnProperty.call(variables, key)
|
||||||
|
) {
|
||||||
|
previewEl.style.background = variables[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
updateRootVariables(variables);
|
||||||
|
resetTo(variables);
|
||||||
|
setFeedback("Theme colors updated successfully.", "success");
|
||||||
|
} catch (error) {
|
||||||
|
setFeedback("Network error: unable to save settings.", "error");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
})();
|
||||||
108
static/js/theme.js
Normal file
108
static/js/theme.js
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
// static/js/theme.js
|
||||||
|
|
||||||
|
document.addEventListener('DOMContentLoaded', () => {
|
||||||
|
const themeSettingsForm = document.getElementById('theme-settings-form');
|
||||||
|
const colorInputs = themeSettingsForm
|
||||||
|
? themeSettingsForm.querySelectorAll('input[type="color"]')
|
||||||
|
: [];
|
||||||
|
|
||||||
|
// Function to apply theme settings to CSS variables
|
||||||
|
function applyTheme(theme) {
|
||||||
|
const root = document.documentElement;
|
||||||
|
if (theme.primary_color)
|
||||||
|
root.style.setProperty('--color-primary', theme.primary_color);
|
||||||
|
if (theme.secondary_color)
|
||||||
|
root.style.setProperty('--color-secondary', theme.secondary_color);
|
||||||
|
if (theme.accent_color)
|
||||||
|
root.style.setProperty('--color-accent', theme.accent_color);
|
||||||
|
if (theme.background_color)
|
||||||
|
root.style.setProperty('--color-background', theme.background_color);
|
||||||
|
if (theme.text_color)
|
||||||
|
root.style.setProperty('--color-text-primary', theme.text_color);
|
||||||
|
// Add other theme properties as needed
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save theme to local storage
|
||||||
|
function saveTheme(theme) {
|
||||||
|
localStorage.setItem('user-theme', JSON.stringify(theme));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load theme from local storage
|
||||||
|
function loadTheme() {
|
||||||
|
const savedTheme = localStorage.getItem('user-theme');
|
||||||
|
return savedTheme ? JSON.parse(savedTheme) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Real-time preview for color inputs
|
||||||
|
colorInputs.forEach((input) => {
|
||||||
|
input.addEventListener('input', (event) => {
|
||||||
|
const cssVar = `--color-${event.target.id.replace('-', '_')}`;
|
||||||
|
document.documentElement.style.setProperty(cssVar, event.target.value);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
if (themeSettingsForm) {
|
||||||
|
themeSettingsForm.addEventListener('submit', async (event) => {
|
||||||
|
event.preventDefault();
|
||||||
|
|
||||||
|
const formData = new FormData(themeSettingsForm);
|
||||||
|
const themeData = Object.fromEntries(formData.entries());
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/theme-settings', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify(themeData),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
alert('Theme settings saved successfully!');
|
||||||
|
applyTheme(themeData);
|
||||||
|
saveTheme(themeData);
|
||||||
|
} else {
|
||||||
|
const errorData = await response.json();
|
||||||
|
alert(`Error saving theme settings: ${errorData.detail}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error:', error);
|
||||||
|
alert('An error occurred while saving theme settings.');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load and apply theme on page load
|
||||||
|
const initialTheme = loadTheme();
|
||||||
|
if (initialTheme) {
|
||||||
|
applyTheme(initialTheme);
|
||||||
|
// Populate form fields if on the theme settings page
|
||||||
|
if (themeSettingsForm) {
|
||||||
|
for (const key in initialTheme) {
|
||||||
|
const input = themeSettingsForm.querySelector(
|
||||||
|
`#${key.replace('_', '-')}`
|
||||||
|
);
|
||||||
|
if (input) {
|
||||||
|
input.value = initialTheme[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// If no saved theme, load from backend (if available)
|
||||||
|
async function loadAndApplyThemeFromServer() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/theme-settings'); // Assuming a GET endpoint for theme settings
|
||||||
|
if (response.ok) {
|
||||||
|
const theme = await response.json();
|
||||||
|
applyTheme(theme);
|
||||||
|
saveTheme(theme); // Save to local storage for future use
|
||||||
|
} else {
|
||||||
|
console.error('Failed to load theme settings from server');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading theme settings from server:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
loadAndApplyThemeFromServer();
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -20,5 +20,6 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% block scripts %}{% endblock %}
|
{% block scripts %}{% endblock %}
|
||||||
|
<script src="/static/js/theme.js"></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
17
templates/forgot_password.html
Normal file
17
templates/forgot_password.html
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Forgot Password{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<h1>Forgot Password</h1>
|
||||||
|
<form id="forgot-password-form">
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="email">Email:</label>
|
||||||
|
<input type="email" id="email" name="email" required>
|
||||||
|
</div>
|
||||||
|
<button type="submit">Reset Password</button>
|
||||||
|
</form>
|
||||||
|
<p>Remember your password? <a href="/login">Login here</a></p>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
22
templates/login.html
Normal file
22
templates/login.html
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Login{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<h1>Login</h1>
|
||||||
|
<form id="login-form">
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="username">Username:</label>
|
||||||
|
<input type="text" id="username" name="username" required>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="password">Password:</label>
|
||||||
|
<input type="password" id="password" name="password" required>
|
||||||
|
</div>
|
||||||
|
<button type="submit">Login</button>
|
||||||
|
</form>
|
||||||
|
<p>Don't have an account? <a href="/register">Register here</a></p>
|
||||||
|
<p><a href="/forgot-password">Forgot password?</a></p>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
@@ -1,17 +1,3 @@
|
|||||||
{% set nav_links = [
|
|
||||||
("/", "Dashboard"),
|
|
||||||
("/ui/scenarios", "Scenarios"),
|
|
||||||
("/ui/parameters", "Parameters"),
|
|
||||||
("/ui/currencies", "Currencies"),
|
|
||||||
("/ui/costs", "Costs"),
|
|
||||||
("/ui/consumption", "Consumption"),
|
|
||||||
("/ui/production", "Production"),
|
|
||||||
("/ui/equipment", "Equipment"),
|
|
||||||
("/ui/maintenance", "Maintenance"),
|
|
||||||
("/ui/simulations", "Simulations"),
|
|
||||||
("/ui/reporting", "Reporting"),
|
|
||||||
] %}
|
|
||||||
|
|
||||||
<div class="sidebar-inner">
|
<div class="sidebar-inner">
|
||||||
<div class="sidebar-brand">
|
<div class="sidebar-brand">
|
||||||
<span class="brand-logo" aria-hidden="true">CM</span>
|
<span class="brand-logo" aria-hidden="true">CM</span>
|
||||||
@@ -20,20 +6,5 @@
|
|||||||
<span class="brand-subtitle">Mining Planner</span>
|
<span class="brand-subtitle">Mining Planner</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<nav class="sidebar-nav" aria-label="Primary navigation">
|
{% include "partials/sidebar_nav.html" %}
|
||||||
{% set current_path = request.url.path if request else "" %}
|
|
||||||
{% for href, label in nav_links %}
|
|
||||||
{% if href == "/" %}
|
|
||||||
{% set is_active = current_path == "/" %}
|
|
||||||
{% else %}
|
|
||||||
{% set is_active = current_path.startswith(href) %}
|
|
||||||
{% endif %}
|
|
||||||
<a
|
|
||||||
href="{{ href }}"
|
|
||||||
class="sidebar-link{% if is_active %} is-active{% endif %}"
|
|
||||||
>
|
|
||||||
{{ label }}
|
|
||||||
</a>
|
|
||||||
{% endfor %}
|
|
||||||
</nav>
|
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
49
templates/partials/sidebar_nav.html
Normal file
49
templates/partials/sidebar_nav.html
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
{% set nav_groups = [ { "label": "Dashboard", "links": [ {"href": "/", "label":
|
||||||
|
"Dashboard"}, ], }, { "label": "Overview", "links": [ {"href": "/ui/parameters",
|
||||||
|
"label": "Parameters"}, {"href": "/ui/costs", "label": "Costs"}, {"href":
|
||||||
|
"/ui/consumption", "label": "Consumption"}, {"href": "/ui/production", "label":
|
||||||
|
"Production"}, { "href": "/ui/equipment", "label": "Equipment", "children": [
|
||||||
|
{"href": "/ui/maintenance", "label": "Maintenance"}, ], }, ], }, { "label":
|
||||||
|
"Simulations", "links": [ {"href": "/ui/simulations", "label": "Simulations"},
|
||||||
|
], }, { "label": "Analytics", "links": [ {"href": "/ui/reporting", "label":
|
||||||
|
"Reporting"}, ], }, { "label": "Settings", "links": [ { "href": "/ui/settings",
|
||||||
|
"label": "Settings", "children": [ {"href": "/theme-settings", "label":
|
||||||
|
"Themes"}, {"href": "/ui/currencies", "label": "Currency Management"}, ], }, ],
|
||||||
|
}, ] %}
|
||||||
|
|
||||||
|
<nav class="sidebar-nav" aria-label="Primary navigation">
|
||||||
|
{% set current_path = request.url.path if request else "" %} {% for group in
|
||||||
|
nav_groups %}
|
||||||
|
<div class="sidebar-section">
|
||||||
|
<div class="sidebar-section-label">{{ group.label }}</div>
|
||||||
|
<div class="sidebar-section-links">
|
||||||
|
{% for link in group.links %} {% set href = link.href %} {% if href == "/"
|
||||||
|
%} {% set is_active = current_path == "/" %} {% else %} {% set is_active =
|
||||||
|
current_path.startswith(href) %} {% endif %}
|
||||||
|
<div class="sidebar-link-block">
|
||||||
|
<a
|
||||||
|
href="{{ href }}"
|
||||||
|
class="sidebar-link{% if is_active %} is-active{% endif %}"
|
||||||
|
>
|
||||||
|
{{ link.label }}
|
||||||
|
</a>
|
||||||
|
{% if link.children %}
|
||||||
|
<div class="sidebar-sublinks">
|
||||||
|
{% for child in link.children %} {% if child.href == "/" %} {% set
|
||||||
|
child_active = current_path == "/" %} {% else %} {% set child_active =
|
||||||
|
current_path.startswith(child.href) %} {% endif %}
|
||||||
|
<a
|
||||||
|
href="{{ child.href }}"
|
||||||
|
class="sidebar-sublink{% if child_active %} is-active{% endif %}"
|
||||||
|
>
|
||||||
|
{{ child.label }}
|
||||||
|
</a>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</nav>
|
||||||
31
templates/profile.html
Normal file
31
templates/profile.html
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Profile{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<h1>User Profile</h1>
|
||||||
|
<p>Username: <span id="profile-username"></span></p>
|
||||||
|
<p>Email: <span id="profile-email"></span></p>
|
||||||
|
<button id="edit-profile-button">Edit Profile</button>
|
||||||
|
|
||||||
|
<div id="edit-profile-form" style="display:none;">
|
||||||
|
<h2>Edit Profile</h2>
|
||||||
|
<form>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="edit-username">Username:</label>
|
||||||
|
<input type="text" id="edit-username" name="username">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="edit-email">Email:</label>
|
||||||
|
<input type="email" id="edit-email" name="email">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="edit-password">New Password:</label>
|
||||||
|
<input type="password" id="edit-password" name="password">
|
||||||
|
</div>
|
||||||
|
<button type="submit">Save Changes</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
25
templates/register.html
Normal file
25
templates/register.html
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Register{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<h1>Register</h1>
|
||||||
|
<form id="register-form">
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="username">Username:</label>
|
||||||
|
<input type="text" id="username" name="username" required>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="email">Email:</label>
|
||||||
|
<input type="email" id="email" name="email" required>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="password">Password:</label>
|
||||||
|
<input type="password" id="password" name="password" required>
|
||||||
|
</div>
|
||||||
|
<button type="submit">Register</button>
|
||||||
|
</form>
|
||||||
|
<p>Already have an account? <a href="/login">Login here</a></p>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
26
templates/settings.html
Normal file
26
templates/settings.html
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
{% extends "base.html" %} {% block title %}Settings · CalMiner{% endblock %} {%
|
||||||
|
block content %}
|
||||||
|
<section class="page-header">
|
||||||
|
<div>
|
||||||
|
<h1>Settings</h1>
|
||||||
|
<p class="page-subtitle">
|
||||||
|
Configure platform defaults and administrative options.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
<section class="settings-grid">
|
||||||
|
<article class="settings-card">
|
||||||
|
<h2>Currency Management</h2>
|
||||||
|
<p>
|
||||||
|
Manage available currencies, symbols, and default selections from the
|
||||||
|
Currency Management page.
|
||||||
|
</p>
|
||||||
|
<a class="button-link" href="/ui/currencies">Go to Currency Management</a>
|
||||||
|
</article>
|
||||||
|
<article class="settings-card">
|
||||||
|
<h2>Themes</h2>
|
||||||
|
<p>Adjust CalMiner theme colors and preview changes instantly.</p>
|
||||||
|
<a class="button-link" href="/theme-settings">Go to Theme Settings</a>
|
||||||
|
</article>
|
||||||
|
</section>
|
||||||
|
{% endblock %}
|
||||||
125
templates/theme_settings.html
Normal file
125
templates/theme_settings.html
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
{% extends "base.html" %} {% block title %}Theme Settings · CalMiner{% endblock
|
||||||
|
%} {% block content %}
|
||||||
|
<section class="page-header">
|
||||||
|
<div>
|
||||||
|
<h1>Theme Settings</h1>
|
||||||
|
<p class="page-subtitle">
|
||||||
|
Adjust CalMiner theme colors and preview changes instantly.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="panel" id="theme-settings" data-api="/api/settings/css">
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Theme Colors</h2>
|
||||||
|
<p class="chart-subtitle">
|
||||||
|
Update global CSS variables to customize CalMiner's appearance.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
<form id="theme-settings-form" class="form-grid color-form-grid" novalidate>
|
||||||
|
{% for key, value in css_variables.items() %} {% set env_meta =
|
||||||
|
css_env_override_meta.get(key) %}
|
||||||
|
<label
|
||||||
|
class="color-form-field{% if env_meta %} is-env-override{% endif %}"
|
||||||
|
data-variable="{{ key }}"
|
||||||
|
>
|
||||||
|
<span class="color-field-header">
|
||||||
|
<span class="color-field-name">{{ key }}</span>
|
||||||
|
<span class="color-field-default"
|
||||||
|
>Default: {{ css_defaults[key] }}</span
|
||||||
|
>
|
||||||
|
</span>
|
||||||
|
<span class="color-field-helper" id="color-helper-{{ loop.index }}"
|
||||||
|
>Accepts hex, rgb(a), or hsl(a) values.</span
|
||||||
|
>
|
||||||
|
{% if env_meta %}
|
||||||
|
<span class="color-env-flag"
|
||||||
|
>Managed via {{ env_meta.env_var }} (read-only)</span
|
||||||
|
>
|
||||||
|
{% endif %}
|
||||||
|
<span class="color-input-row">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
name="{{ key }}"
|
||||||
|
class="color-value-input"
|
||||||
|
value="{{ value }}"
|
||||||
|
autocomplete="off"
|
||||||
|
aria-describedby="color-helper-{{ loop.index }}"
|
||||||
|
{%
|
||||||
|
if
|
||||||
|
env_meta
|
||||||
|
%}disabled
|
||||||
|
aria-disabled="true"
|
||||||
|
data-env-override="true"
|
||||||
|
{%
|
||||||
|
endif
|
||||||
|
%}
|
||||||
|
/>
|
||||||
|
<span
|
||||||
|
class="color-preview"
|
||||||
|
aria-hidden="true"
|
||||||
|
style="background: {{ value }}"
|
||||||
|
></span>
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
<div class="button-row">
|
||||||
|
<button type="submit" class="btn primary">Save Theme</button>
|
||||||
|
<button type="button" class="btn" id="theme-settings-reset">
|
||||||
|
Reset to Defaults
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
{% from "partials/components.html" import feedback with context %} {{
|
||||||
|
feedback("theme-settings-feedback") }}
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="panel" id="theme-env-overrides">
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Environment Overrides</h2>
|
||||||
|
<p class="chart-subtitle">
|
||||||
|
The following CSS variables are controlled via environment variables and
|
||||||
|
take precedence over database values.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
{% if css_env_override_rows %}
|
||||||
|
<div class="table-container env-overrides-table">
|
||||||
|
<table aria-label="Environment-controlled theme variables">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">CSS Variable</th>
|
||||||
|
<th scope="col">Environment Variable</th>
|
||||||
|
<th scope="col">Value</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for row in css_env_override_rows %}
|
||||||
|
<tr>
|
||||||
|
<td><code>{{ row.css_key }}</code></td>
|
||||||
|
<td><code>{{ row.env_var }}</code></td>
|
||||||
|
<td><code>{{ row.value }}</code></td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<p class="empty-state">No environment overrides configured.</p>
|
||||||
|
{% endif %}
|
||||||
|
</section>
|
||||||
|
{% endblock %} {% block scripts %} {{ super() }}
|
||||||
|
<script id="theme-settings-data" type="application/json">
|
||||||
|
{{ {
|
||||||
|
"variables": css_variables,
|
||||||
|
"defaults": css_defaults,
|
||||||
|
"envOverrides": css_env_overrides,
|
||||||
|
"envSources": css_env_override_rows
|
||||||
|
} | tojson }}
|
||||||
|
</script>
|
||||||
|
<script src="/static/js/settings.js"></script>
|
||||||
|
{% endblock %}
|
||||||
@@ -4,6 +4,7 @@ import time
|
|||||||
from typing import Dict, Generator
|
from typing import Dict, Generator
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
# type: ignore[import]
|
# type: ignore[import]
|
||||||
from playwright.sync_api import Browser, Page, Playwright, sync_playwright
|
from playwright.sync_api import Browser, Page, Playwright, sync_playwright
|
||||||
|
|
||||||
@@ -70,10 +71,17 @@ def seed_default_currencies(live_server: str) -> None:
|
|||||||
|
|
||||||
seeds = [
|
seeds = [
|
||||||
{"code": "EUR", "name": "Euro", "symbol": "EUR", "is_active": True},
|
{"code": "EUR", "name": "Euro", "symbol": "EUR", "is_active": True},
|
||||||
{"code": "CLP", "name": "Chilean Peso", "symbol": "CLP$", "is_active": True},
|
{
|
||||||
|
"code": "CLP",
|
||||||
|
"name": "Chilean Peso",
|
||||||
|
"symbol": "CLP$",
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
with httpx.Client(base_url=live_server, timeout=5.0, trust_env=False) as client:
|
with httpx.Client(
|
||||||
|
base_url=live_server, timeout=5.0, trust_env=False
|
||||||
|
) as client:
|
||||||
try:
|
try:
|
||||||
response = client.get("/api/currencies/?include_inactive=true")
|
response = client.get("/api/currencies/?include_inactive=true")
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
@@ -128,8 +136,12 @@ def page(browser: Browser, live_server: str) -> Generator[Page, None, None]:
|
|||||||
def _prepare_database_environment(env: Dict[str, str]) -> Dict[str, str]:
|
def _prepare_database_environment(env: Dict[str, str]) -> Dict[str, str]:
|
||||||
"""Ensure granular database env vars are available for the app under test."""
|
"""Ensure granular database env vars are available for the app under test."""
|
||||||
|
|
||||||
required = ("DATABASE_HOST", "DATABASE_USER",
|
required = (
|
||||||
"DATABASE_NAME", "DATABASE_PASSWORD")
|
"DATABASE_HOST",
|
||||||
|
"DATABASE_USER",
|
||||||
|
"DATABASE_NAME",
|
||||||
|
"DATABASE_PASSWORD",
|
||||||
|
)
|
||||||
if all(env.get(key) for key in required):
|
if all(env.get(key) for key in required):
|
||||||
return env
|
return env
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,9 @@ def test_consumption_form_loads(page: Page):
|
|||||||
"""Verify the consumption form page loads correctly."""
|
"""Verify the consumption form page loads correctly."""
|
||||||
page.goto("/ui/consumption")
|
page.goto("/ui/consumption")
|
||||||
expect(page).to_have_title("Consumption · CalMiner")
|
expect(page).to_have_title("Consumption · CalMiner")
|
||||||
expect(page.locator("h2:has-text('Add Consumption Record')")).to_be_visible()
|
expect(
|
||||||
|
page.locator("h2:has-text('Add Consumption Record')")
|
||||||
|
).to_be_visible()
|
||||||
|
|
||||||
|
|
||||||
def test_create_consumption_item(page: Page):
|
def test_create_consumption_item(page: Page):
|
||||||
|
|||||||
@@ -55,7 +55,9 @@ def test_create_capex_and_opex_items(page: Page):
|
|||||||
).to_be_visible()
|
).to_be_visible()
|
||||||
|
|
||||||
# Verify the feedback messages.
|
# Verify the feedback messages.
|
||||||
expect(page.locator("#capex-feedback")
|
expect(page.locator("#capex-feedback")).to_have_text(
|
||||||
).to_have_text("Entry saved successfully.")
|
"Entry saved successfully."
|
||||||
expect(page.locator("#opex-feedback")
|
)
|
||||||
).to_have_text("Entry saved successfully.")
|
expect(page.locator("#opex-feedback")).to_have_text(
|
||||||
|
"Entry saved successfully."
|
||||||
|
)
|
||||||
|
|||||||
@@ -12,7 +12,8 @@ def _unique_currency_code(existing: set[str]) -> str:
|
|||||||
if candidate not in existing and candidate != "USD":
|
if candidate not in existing and candidate != "USD":
|
||||||
return candidate
|
return candidate
|
||||||
raise AssertionError(
|
raise AssertionError(
|
||||||
"Unable to generate a unique currency code for the test run.")
|
"Unable to generate a unique currency code for the test run."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _metric_value(page: Page, element_id: str) -> int:
|
def _metric_value(page: Page, element_id: str) -> int:
|
||||||
@@ -42,8 +43,9 @@ def test_currency_workflow_create_update_toggle(page: Page) -> None:
|
|||||||
expect(page.locator("h2:has-text('Currency Overview')")).to_be_visible()
|
expect(page.locator("h2:has-text('Currency Overview')")).to_be_visible()
|
||||||
|
|
||||||
code_cells = page.locator("#currencies-table-body tr td:nth-child(1)")
|
code_cells = page.locator("#currencies-table-body tr td:nth-child(1)")
|
||||||
existing_codes = {text.strip().upper()
|
existing_codes = {
|
||||||
for text in code_cells.all_inner_texts()}
|
text.strip().upper() for text in code_cells.all_inner_texts()
|
||||||
|
}
|
||||||
|
|
||||||
total_before = _metric_value(page, "currency-metric-total")
|
total_before = _metric_value(page, "currency-metric-total")
|
||||||
active_before = _metric_value(page, "currency-metric-active")
|
active_before = _metric_value(page, "currency-metric-active")
|
||||||
@@ -109,7 +111,9 @@ def test_currency_workflow_create_update_toggle(page: Page) -> None:
|
|||||||
toggle_button = row.locator("button[data-action='toggle']")
|
toggle_button = row.locator("button[data-action='toggle']")
|
||||||
expect(toggle_button).to_have_text("Activate")
|
expect(toggle_button).to_have_text("Activate")
|
||||||
|
|
||||||
with page.expect_response(f"**/api/currencies/{new_code}/activation") as toggle_info:
|
with page.expect_response(
|
||||||
|
f"**/api/currencies/{new_code}/activation"
|
||||||
|
) as toggle_info:
|
||||||
toggle_button.click()
|
toggle_button.click()
|
||||||
toggle_response = toggle_info.value
|
toggle_response = toggle_info.value
|
||||||
assert toggle_response.status == 200
|
assert toggle_response.status == 200
|
||||||
@@ -126,5 +130,6 @@ def test_currency_workflow_create_update_toggle(page: Page) -> None:
|
|||||||
_expect_feedback(page, f"Currency {new_code} activated.")
|
_expect_feedback(page, f"Currency {new_code} activated.")
|
||||||
|
|
||||||
expect(row.locator("td").nth(3)).to_contain_text("Active")
|
expect(row.locator("td").nth(3)).to_contain_text("Active")
|
||||||
expect(row.locator("button[data-action='toggle']")
|
expect(row.locator("button[data-action='toggle']")).to_have_text(
|
||||||
).to_have_text("Deactivate")
|
"Deactivate"
|
||||||
|
)
|
||||||
|
|||||||
@@ -38,11 +38,8 @@ def test_create_equipment_item(page: Page):
|
|||||||
# Verify the new item appears in the table.
|
# Verify the new item appears in the table.
|
||||||
page.select_option("#equipment-scenario-filter", label=scenario_name)
|
page.select_option("#equipment-scenario-filter", label=scenario_name)
|
||||||
expect(
|
expect(
|
||||||
page.locator("#equipment-table-body tr").filter(
|
page.locator("#equipment-table-body tr").filter(has_text=equipment_name)
|
||||||
has_text=equipment_name
|
|
||||||
)
|
|
||||||
).to_be_visible()
|
).to_be_visible()
|
||||||
|
|
||||||
# Verify the feedback message.
|
# Verify the feedback message.
|
||||||
expect(page.locator("#equipment-feedback")
|
expect(page.locator("#equipment-feedback")).to_have_text("Equipment saved.")
|
||||||
).to_have_text("Equipment saved.")
|
|
||||||
|
|||||||
@@ -53,5 +53,6 @@ def test_create_maintenance_item(page: Page):
|
|||||||
).to_be_visible()
|
).to_be_visible()
|
||||||
|
|
||||||
# Verify the feedback message.
|
# Verify the feedback message.
|
||||||
expect(page.locator("#maintenance-feedback")
|
expect(page.locator("#maintenance-feedback")).to_have_text(
|
||||||
).to_have_text("Maintenance entry saved.")
|
"Maintenance entry saved."
|
||||||
|
)
|
||||||
|
|||||||
@@ -43,5 +43,6 @@ def test_create_production_item(page: Page):
|
|||||||
).to_be_visible()
|
).to_be_visible()
|
||||||
|
|
||||||
# Verify the feedback message.
|
# Verify the feedback message.
|
||||||
expect(page.locator("#production-feedback")
|
expect(page.locator("#production-feedback")).to_have_text(
|
||||||
).to_have_text("Production output saved.")
|
"Production output saved."
|
||||||
|
)
|
||||||
|
|||||||
@@ -39,4 +39,5 @@ def test_create_new_scenario(page: Page):
|
|||||||
feedback = page.locator("#feedback")
|
feedback = page.locator("#feedback")
|
||||||
expect(feedback).to_be_visible()
|
expect(feedback).to_be_visible()
|
||||||
expect(feedback).to_have_text(
|
expect(feedback).to_have_text(
|
||||||
f'Scenario "{scenario_name}" created successfully.')
|
f'Scenario "{scenario_name}" created successfully.'
|
||||||
|
)
|
||||||
|
|||||||
@@ -5,8 +5,13 @@ from playwright.sync_api import Page, expect
|
|||||||
UI_ROUTES = [
|
UI_ROUTES = [
|
||||||
("/", "Dashboard · CalMiner", "Operations Overview"),
|
("/", "Dashboard · CalMiner", "Operations Overview"),
|
||||||
("/ui/dashboard", "Dashboard · CalMiner", "Operations Overview"),
|
("/ui/dashboard", "Dashboard · CalMiner", "Operations Overview"),
|
||||||
("/ui/scenarios", "Scenario Management · CalMiner", "Create a New Scenario"),
|
(
|
||||||
|
"/ui/scenarios",
|
||||||
|
"Scenario Management · CalMiner",
|
||||||
|
"Create a New Scenario",
|
||||||
|
),
|
||||||
("/ui/parameters", "Process Parameters · CalMiner", "Scenario Parameters"),
|
("/ui/parameters", "Process Parameters · CalMiner", "Scenario Parameters"),
|
||||||
|
("/ui/settings", "Settings · CalMiner", "Settings"),
|
||||||
("/ui/costs", "Costs · CalMiner", "Cost Overview"),
|
("/ui/costs", "Costs · CalMiner", "Cost Overview"),
|
||||||
("/ui/consumption", "Consumption · CalMiner", "Consumption Tracking"),
|
("/ui/consumption", "Consumption · CalMiner", "Consumption Tracking"),
|
||||||
("/ui/production", "Production · CalMiner", "Production Output"),
|
("/ui/production", "Production · CalMiner", "Production Output"),
|
||||||
@@ -19,11 +24,62 @@ UI_ROUTES = [
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("url, title, heading", UI_ROUTES)
|
@pytest.mark.parametrize("url, title, heading", UI_ROUTES)
|
||||||
def test_ui_pages_load_correctly(page: Page, url: str, title: str, heading: str):
|
def test_ui_pages_load_correctly(
|
||||||
|
page: Page, url: str, title: str, heading: str
|
||||||
|
):
|
||||||
"""Verify that all UI pages load with the correct title and a visible heading."""
|
"""Verify that all UI pages load with the correct title and a visible heading."""
|
||||||
page.goto(url)
|
page.goto(url)
|
||||||
expect(page).to_have_title(title)
|
expect(page).to_have_title(title)
|
||||||
# The app uses a mix of h1 and h2 for main page headings.
|
# The app uses a mix of h1 and h2 for main page headings.
|
||||||
heading_locator = page.locator(
|
heading_locator = page.locator(
|
||||||
f"h1:has-text('{heading}'), h2:has-text('{heading}')")
|
f"h1:has-text('{heading}'), h2:has-text('{heading}')"
|
||||||
|
)
|
||||||
expect(heading_locator.first).to_be_visible()
|
expect(heading_locator.first).to_be_visible()
|
||||||
|
|
||||||
|
|
||||||
|
def test_settings_theme_form_interaction(page: Page):
|
||||||
|
page.goto("/theme-settings")
|
||||||
|
expect(page).to_have_title("Theme Settings · CalMiner")
|
||||||
|
|
||||||
|
env_rows = page.locator("#theme-env-overrides tbody tr")
|
||||||
|
disabled_inputs = page.locator(
|
||||||
|
"#theme-settings-form input.color-value-input[disabled]"
|
||||||
|
)
|
||||||
|
env_row_count = env_rows.count()
|
||||||
|
disabled_count = disabled_inputs.count()
|
||||||
|
assert disabled_count == env_row_count
|
||||||
|
|
||||||
|
color_input = page.locator(
|
||||||
|
"#theme-settings-form input[name='--color-primary']"
|
||||||
|
)
|
||||||
|
expect(color_input).to_be_visible()
|
||||||
|
expect(color_input).to_be_enabled()
|
||||||
|
|
||||||
|
original_value = color_input.input_value()
|
||||||
|
candidate_values = ("#114455", "#225566")
|
||||||
|
new_value = (
|
||||||
|
candidate_values[0]
|
||||||
|
if original_value != candidate_values[0]
|
||||||
|
else candidate_values[1]
|
||||||
|
)
|
||||||
|
|
||||||
|
color_input.fill(new_value)
|
||||||
|
page.click("#theme-settings-form button[type='submit']")
|
||||||
|
|
||||||
|
feedback = page.locator("#theme-settings-feedback")
|
||||||
|
expect(feedback).to_contain_text("updated successfully")
|
||||||
|
|
||||||
|
computed_color = page.evaluate(
|
||||||
|
"() => getComputedStyle(document.documentElement).getPropertyValue('--color-primary').trim()"
|
||||||
|
)
|
||||||
|
assert computed_color.lower() == new_value.lower()
|
||||||
|
|
||||||
|
page.reload()
|
||||||
|
expect(color_input).to_have_value(new_value)
|
||||||
|
|
||||||
|
color_input.fill(original_value)
|
||||||
|
page.click("#theme-settings-form button[type='submit']")
|
||||||
|
expect(feedback).to_contain_text("updated successfully")
|
||||||
|
|
||||||
|
page.reload()
|
||||||
|
expect(color_input).to_have_value(original_value)
|
||||||
|
|||||||
@@ -27,36 +27,47 @@ engine = create_engine(
|
|||||||
poolclass=StaticPool,
|
poolclass=StaticPool,
|
||||||
)
|
)
|
||||||
TestingSessionLocal = sessionmaker(
|
TestingSessionLocal = sessionmaker(
|
||||||
autocommit=False, autoflush=False, bind=engine)
|
autocommit=False, autoflush=False, bind=engine
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session", autouse=True)
|
@pytest.fixture(scope="session", autouse=True)
|
||||||
def setup_database() -> Generator[None, None, None]:
|
def setup_database() -> Generator[None, None, None]:
|
||||||
# Ensure all model metadata is registered before creating tables
|
# Ensure all model metadata is registered before creating tables
|
||||||
from models import (
|
from models import (
|
||||||
|
application_setting,
|
||||||
capex,
|
capex,
|
||||||
consumption,
|
consumption,
|
||||||
|
currency,
|
||||||
distribution,
|
distribution,
|
||||||
equipment,
|
equipment,
|
||||||
maintenance,
|
maintenance,
|
||||||
opex,
|
opex,
|
||||||
parameters,
|
parameters,
|
||||||
production_output,
|
production_output,
|
||||||
|
role,
|
||||||
scenario,
|
scenario,
|
||||||
simulation_result,
|
simulation_result,
|
||||||
|
theme_setting,
|
||||||
|
user,
|
||||||
) # noqa: F401 - imported for side effects
|
) # noqa: F401 - imported for side effects
|
||||||
|
|
||||||
_ = (
|
_ = (
|
||||||
capex,
|
capex,
|
||||||
consumption,
|
consumption,
|
||||||
|
currency,
|
||||||
distribution,
|
distribution,
|
||||||
equipment,
|
equipment,
|
||||||
maintenance,
|
maintenance,
|
||||||
|
application_setting,
|
||||||
opex,
|
opex,
|
||||||
parameters,
|
parameters,
|
||||||
production_output,
|
production_output,
|
||||||
|
role,
|
||||||
scenario,
|
scenario,
|
||||||
simulation_result,
|
simulation_result,
|
||||||
|
theme_setting,
|
||||||
|
user,
|
||||||
)
|
)
|
||||||
|
|
||||||
Base.metadata.create_all(bind=engine)
|
Base.metadata.create_all(bind=engine)
|
||||||
@@ -66,10 +77,13 @@ def setup_database() -> Generator[None, None, None]:
|
|||||||
|
|
||||||
@pytest.fixture()
|
@pytest.fixture()
|
||||||
def db_session() -> Generator[Session, None, None]:
|
def db_session() -> Generator[Session, None, None]:
|
||||||
|
Base.metadata.drop_all(bind=engine)
|
||||||
|
Base.metadata.create_all(bind=engine)
|
||||||
session = TestingSessionLocal()
|
session = TestingSessionLocal()
|
||||||
try:
|
try:
|
||||||
yield session
|
yield session
|
||||||
finally:
|
finally:
|
||||||
|
session.rollback()
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
|
|
||||||
@@ -81,22 +95,23 @@ def api_client(db_session: Session) -> Generator[TestClient, None, None]:
|
|||||||
finally:
|
finally:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from routes import dependencies as route_dependencies
|
from routes.dependencies import get_db
|
||||||
|
|
||||||
app.dependency_overrides[route_dependencies.get_db] = override_get_db
|
app.dependency_overrides[get_db] = override_get_db
|
||||||
|
|
||||||
with TestClient(app) as client:
|
with TestClient(app) as client:
|
||||||
yield client
|
yield client
|
||||||
|
|
||||||
app.dependency_overrides.pop(route_dependencies.get_db, None)
|
app.dependency_overrides.pop(get_db, None)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture()
|
@pytest.fixture()
|
||||||
def seeded_ui_data(db_session: Session) -> Generator[Dict[str, Any], None, None]:
|
def seeded_ui_data(
|
||||||
|
db_session: Session,
|
||||||
|
) -> Generator[Dict[str, Any], None, None]:
|
||||||
"""Populate a scenario with representative related records for UI tests."""
|
"""Populate a scenario with representative related records for UI tests."""
|
||||||
scenario_name = f"Scenario Alpha {uuid4()}"
|
scenario_name = f"Scenario Alpha {uuid4()}"
|
||||||
scenario = Scenario(name=scenario_name,
|
scenario = Scenario(name=scenario_name, description="Seeded UI scenario")
|
||||||
description="Seeded UI scenario")
|
|
||||||
db_session.add(scenario)
|
db_session.add(scenario)
|
||||||
db_session.flush()
|
db_session.flush()
|
||||||
|
|
||||||
@@ -156,7 +171,9 @@ def seeded_ui_data(db_session: Session) -> Generator[Dict[str, Any], None, None]
|
|||||||
iteration=index,
|
iteration=index,
|
||||||
result=value,
|
result=value,
|
||||||
)
|
)
|
||||||
for index, value in enumerate((950_000.0, 975_000.0, 990_000.0), start=1)
|
for index, value in enumerate(
|
||||||
|
(950_000.0, 975_000.0, 990_000.0), start=1
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
db_session.add(maintenance)
|
db_session.add(maintenance)
|
||||||
@@ -191,11 +208,15 @@ def seeded_ui_data(db_session: Session) -> Generator[Dict[str, Any], None, None]
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture()
|
@pytest.fixture()
|
||||||
def invalid_request_payloads(db_session: Session) -> Generator[Dict[str, Any], None, None]:
|
def invalid_request_payloads(
|
||||||
|
db_session: Session,
|
||||||
|
) -> Generator[Dict[str, Any], None, None]:
|
||||||
"""Provide reusable invalid request bodies for exercising validation branches."""
|
"""Provide reusable invalid request bodies for exercising validation branches."""
|
||||||
duplicate_name = f"Scenario Duplicate {uuid4()}"
|
duplicate_name = f"Scenario Duplicate {uuid4()}"
|
||||||
existing = Scenario(name=duplicate_name,
|
existing = Scenario(
|
||||||
description="Existing scenario for duplicate checks")
|
name=duplicate_name,
|
||||||
|
description="Existing scenario for duplicate checks",
|
||||||
|
)
|
||||||
db_session.add(existing)
|
db_session.add(existing)
|
||||||
db_session.commit()
|
db_session.commit()
|
||||||
|
|
||||||
|
|||||||
231
tests/unit/test_auth.py
Normal file
231
tests/unit/test_auth.py
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
from services.security import get_password_hash, verify_password
|
||||||
|
|
||||||
|
|
||||||
|
def test_password_hashing():
|
||||||
|
password = "testpassword"
|
||||||
|
hashed_password = get_password_hash(password)
|
||||||
|
assert verify_password(password, hashed_password)
|
||||||
|
assert not verify_password("wrongpassword", hashed_password)
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_user(api_client):
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "testuser",
|
||||||
|
"email": "test@example.com",
|
||||||
|
"password": "testpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["username"] == "testuser"
|
||||||
|
assert data["email"] == "test@example.com"
|
||||||
|
assert "id" in data
|
||||||
|
assert "role_id" in data
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "testuser",
|
||||||
|
"email": "another@example.com",
|
||||||
|
"password": "testpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 400
|
||||||
|
assert response.json() == {"detail": "Username already registered"}
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "anotheruser",
|
||||||
|
"email": "test@example.com",
|
||||||
|
"password": "testpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 400
|
||||||
|
assert response.json() == {"detail": "Email already registered"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_login_user(api_client):
|
||||||
|
# Register a user first
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "loginuser",
|
||||||
|
"email": "login@example.com",
|
||||||
|
"password": "loginpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "loginuser", "password": "loginpassword"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert "access_token" in data
|
||||||
|
assert data["token_type"] == "bearer"
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "loginuser", "password": "wrongpassword"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 401
|
||||||
|
assert response.json() == {"detail": "Incorrect username or password"}
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "nonexistent", "password": "password"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 401
|
||||||
|
assert response.json() == {"detail": "Incorrect username or password"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_users_me(api_client):
|
||||||
|
# Register a user first
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "profileuser",
|
||||||
|
"email": "profile@example.com",
|
||||||
|
"password": "profilepassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
# Login to get a token
|
||||||
|
login_response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "profileuser", "password": "profilepassword"},
|
||||||
|
)
|
||||||
|
token = login_response.json()["access_token"]
|
||||||
|
|
||||||
|
response = api_client.get(
|
||||||
|
"/users/me", headers={"Authorization": f"Bearer {token}"}
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["username"] == "profileuser"
|
||||||
|
assert data["email"] == "profile@example.com"
|
||||||
|
|
||||||
|
|
||||||
|
def test_update_users_me(api_client):
|
||||||
|
# Register a user first
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "updateuser",
|
||||||
|
"email": "update@example.com",
|
||||||
|
"password": "updatepassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
# Login to get a token
|
||||||
|
login_response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "updateuser", "password": "updatepassword"},
|
||||||
|
)
|
||||||
|
token = login_response.json()["access_token"]
|
||||||
|
|
||||||
|
response = api_client.put(
|
||||||
|
"/users/me",
|
||||||
|
headers={"Authorization": f"Bearer {token}"},
|
||||||
|
json={
|
||||||
|
"username": "updateduser",
|
||||||
|
"email": "updated@example.com",
|
||||||
|
"password": "newpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["username"] == "updateduser"
|
||||||
|
assert data["email"] == "updated@example.com"
|
||||||
|
|
||||||
|
# Verify password change
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "updateduser", "password": "newpassword"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
token = response.json()["access_token"]
|
||||||
|
|
||||||
|
# Test username already taken
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "anotherupdateuser",
|
||||||
|
"email": "anotherupdate@example.com",
|
||||||
|
"password": "password",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
response = api_client.put(
|
||||||
|
"/users/me",
|
||||||
|
headers={"Authorization": f"Bearer {token}"},
|
||||||
|
json={
|
||||||
|
"username": "anotherupdateuser",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 400
|
||||||
|
assert response.json() == {"detail": "Username already taken"}
|
||||||
|
|
||||||
|
# Test email already registered
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "yetanotheruser",
|
||||||
|
"email": "yetanother@example.com",
|
||||||
|
"password": "password",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
response = api_client.put(
|
||||||
|
"/users/me",
|
||||||
|
headers={"Authorization": f"Bearer {token}"},
|
||||||
|
json={
|
||||||
|
"email": "yetanother@example.com",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 400
|
||||||
|
assert response.json() == {"detail": "Email already registered"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_forgot_password(api_client):
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/forgot-password", json={"email": "nonexistent@example.com"}
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"message": "Password reset email sent (not really)"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_reset_password(api_client):
|
||||||
|
# Register a user first
|
||||||
|
api_client.post(
|
||||||
|
"/users/register",
|
||||||
|
json={
|
||||||
|
"username": "resetuser",
|
||||||
|
"email": "reset@example.com",
|
||||||
|
"password": "oldpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/reset-password",
|
||||||
|
json={
|
||||||
|
"token": "resetuser", # Use username as token for test
|
||||||
|
"new_password": "newpassword",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"message": "Password has been reset successfully"}
|
||||||
|
|
||||||
|
# Verify password change
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "resetuser", "password": "newpassword"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
response = api_client.post(
|
||||||
|
"/users/login",
|
||||||
|
json={"username": "resetuser", "password": "oldpassword"},
|
||||||
|
)
|
||||||
|
assert response.status_code == 401
|
||||||
@@ -57,8 +57,11 @@ def test_list_consumption_returns_created_items(client: TestClient) -> None:
|
|||||||
|
|
||||||
list_response = client.get("/api/consumption/")
|
list_response = client.get("/api/consumption/")
|
||||||
assert list_response.status_code == 200
|
assert list_response.status_code == 200
|
||||||
items = [item for item in list_response.json(
|
items = [
|
||||||
) if item["scenario_id"] == scenario_id]
|
item
|
||||||
|
for item in list_response.json()
|
||||||
|
if item["scenario_id"] == scenario_id
|
||||||
|
]
|
||||||
assert {item["amount"] for item in items} == set(values)
|
assert {item["amount"] for item in items} == set(values)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -47,8 +47,9 @@ def test_create_and_list_capex_and_opex():
|
|||||||
resp3 = client.get("/api/costs/capex")
|
resp3 = client.get("/api/costs/capex")
|
||||||
assert resp3.status_code == 200
|
assert resp3.status_code == 200
|
||||||
data = resp3.json()
|
data = resp3.json()
|
||||||
assert any(item["amount"] == 1000.0 and item["scenario_id"]
|
assert any(
|
||||||
== sid for item in data)
|
item["amount"] == 1000.0 and item["scenario_id"] == sid for item in data
|
||||||
|
)
|
||||||
|
|
||||||
opex_payload = {
|
opex_payload = {
|
||||||
"scenario_id": sid,
|
"scenario_id": sid,
|
||||||
@@ -66,8 +67,10 @@ def test_create_and_list_capex_and_opex():
|
|||||||
resp5 = client.get("/api/costs/opex")
|
resp5 = client.get("/api/costs/opex")
|
||||||
assert resp5.status_code == 200
|
assert resp5.status_code == 200
|
||||||
data_o = resp5.json()
|
data_o = resp5.json()
|
||||||
assert any(item["amount"] == 500.0 and item["scenario_id"]
|
assert any(
|
||||||
== sid for item in data_o)
|
item["amount"] == 500.0 and item["scenario_id"] == sid
|
||||||
|
for item in data_o
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_multiple_capex_entries():
|
def test_multiple_capex_entries():
|
||||||
@@ -88,8 +91,9 @@ def test_multiple_capex_entries():
|
|||||||
resp = client.get("/api/costs/capex")
|
resp = client.get("/api/costs/capex")
|
||||||
assert resp.status_code == 200
|
assert resp.status_code == 200
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
retrieved_amounts = [item["amount"]
|
retrieved_amounts = [
|
||||||
for item in data if item["scenario_id"] == sid]
|
item["amount"] for item in data if item["scenario_id"] == sid
|
||||||
|
]
|
||||||
for amount in amounts:
|
for amount in amounts:
|
||||||
assert amount in retrieved_amounts
|
assert amount in retrieved_amounts
|
||||||
|
|
||||||
@@ -112,7 +116,8 @@ def test_multiple_opex_entries():
|
|||||||
resp = client.get("/api/costs/opex")
|
resp = client.get("/api/costs/opex")
|
||||||
assert resp.status_code == 200
|
assert resp.status_code == 200
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
retrieved_amounts = [item["amount"]
|
retrieved_amounts = [
|
||||||
for item in data if item["scenario_id"] == sid]
|
item["amount"] for item in data if item["scenario_id"] == sid
|
||||||
|
]
|
||||||
for amount in amounts:
|
for amount in amounts:
|
||||||
assert amount in retrieved_amounts
|
assert amount in retrieved_amounts
|
||||||
|
|||||||
@@ -14,7 +14,13 @@ def _cleanup_currencies(db_session):
|
|||||||
db_session.commit()
|
db_session.commit()
|
||||||
|
|
||||||
|
|
||||||
def _assert_currency(payload: Dict[str, object], code: str, name: str, symbol: str | None, is_active: bool) -> None:
|
def _assert_currency(
|
||||||
|
payload: Dict[str, object],
|
||||||
|
code: str,
|
||||||
|
name: str,
|
||||||
|
symbol: str | None,
|
||||||
|
is_active: bool,
|
||||||
|
) -> None:
|
||||||
assert payload["code"] == code
|
assert payload["code"] == code
|
||||||
assert payload["name"] == name
|
assert payload["name"] == name
|
||||||
assert payload["is_active"] is is_active
|
assert payload["is_active"] is is_active
|
||||||
@@ -47,13 +53,21 @@ def test_create_currency_success(api_client, db_session):
|
|||||||
def test_create_currency_conflict(api_client, db_session):
|
def test_create_currency_conflict(api_client, db_session):
|
||||||
api_client.post(
|
api_client.post(
|
||||||
"/api/currencies/",
|
"/api/currencies/",
|
||||||
json={"code": "CAD", "name": "Canadian Dollar",
|
json={
|
||||||
"symbol": "$", "is_active": True},
|
"code": "CAD",
|
||||||
|
"name": "Canadian Dollar",
|
||||||
|
"symbol": "$",
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
duplicate = api_client.post(
|
duplicate = api_client.post(
|
||||||
"/api/currencies/",
|
"/api/currencies/",
|
||||||
json={"code": "CAD", "name": "Canadian Dollar",
|
json={
|
||||||
"symbol": "$", "is_active": True},
|
"code": "CAD",
|
||||||
|
"name": "Canadian Dollar",
|
||||||
|
"symbol": "$",
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
assert duplicate.status_code == 409
|
assert duplicate.status_code == 409
|
||||||
|
|
||||||
@@ -61,8 +75,12 @@ def test_create_currency_conflict(api_client, db_session):
|
|||||||
def test_update_currency_fields(api_client, db_session):
|
def test_update_currency_fields(api_client, db_session):
|
||||||
api_client.post(
|
api_client.post(
|
||||||
"/api/currencies/",
|
"/api/currencies/",
|
||||||
json={"code": "GBP", "name": "British Pound",
|
json={
|
||||||
"symbol": "£", "is_active": True},
|
"code": "GBP",
|
||||||
|
"name": "British Pound",
|
||||||
|
"symbol": "£",
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
response = api_client.put(
|
response = api_client.put(
|
||||||
@@ -77,8 +95,12 @@ def test_update_currency_fields(api_client, db_session):
|
|||||||
def test_toggle_currency_activation(api_client, db_session):
|
def test_toggle_currency_activation(api_client, db_session):
|
||||||
api_client.post(
|
api_client.post(
|
||||||
"/api/currencies/",
|
"/api/currencies/",
|
||||||
json={"code": "AUD", "name": "Australian Dollar",
|
json={
|
||||||
"symbol": "A$", "is_active": True},
|
"code": "AUD",
|
||||||
|
"name": "Australian Dollar",
|
||||||
|
"symbol": "A$",
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
response = api_client.patch(
|
response = api_client.patch(
|
||||||
@@ -97,5 +119,7 @@ def test_default_currency_cannot_be_deactivated(api_client, db_session):
|
|||||||
json={"is_active": False},
|
json={"is_active": False},
|
||||||
)
|
)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
assert response.json()[
|
assert (
|
||||||
"detail"] == "The default currency cannot be deactivated."
|
response.json()["detail"]
|
||||||
|
== "The default currency cannot be deactivated."
|
||||||
|
)
|
||||||
|
|||||||
@@ -41,9 +41,10 @@ def test_create_capex_with_currency_code_and_list(api_client, seeded_currency):
|
|||||||
resp = api_client.post("/api/costs/capex", json=payload)
|
resp = api_client.post("/api/costs/capex", json=payload)
|
||||||
assert resp.status_code == 200
|
assert resp.status_code == 200
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
assert data.get("currency_code") == seeded_currency.code or data.get(
|
assert (
|
||||||
"currency", {}
|
data.get("currency_code") == seeded_currency.code
|
||||||
).get("code") == seeded_currency.code
|
or data.get("currency", {}).get("code") == seeded_currency.code
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_create_opex_with_currency_id(api_client, seeded_currency):
|
def test_create_opex_with_currency_id(api_client, seeded_currency):
|
||||||
|
|||||||
@@ -30,7 +30,9 @@ def _create_scenario_and_equipment(client: TestClient):
|
|||||||
return scenario_id, equipment_id
|
return scenario_id, equipment_id
|
||||||
|
|
||||||
|
|
||||||
def _create_maintenance_payload(equipment_id: int, scenario_id: int, description: str):
|
def _create_maintenance_payload(
|
||||||
|
equipment_id: int, scenario_id: int, description: str
|
||||||
|
):
|
||||||
return {
|
return {
|
||||||
"equipment_id": equipment_id,
|
"equipment_id": equipment_id,
|
||||||
"scenario_id": scenario_id,
|
"scenario_id": scenario_id,
|
||||||
@@ -43,7 +45,8 @@ def _create_maintenance_payload(equipment_id: int, scenario_id: int, description
|
|||||||
def test_create_and_list_maintenance(client: TestClient):
|
def test_create_and_list_maintenance(client: TestClient):
|
||||||
scenario_id, equipment_id = _create_scenario_and_equipment(client)
|
scenario_id, equipment_id = _create_scenario_and_equipment(client)
|
||||||
payload = _create_maintenance_payload(
|
payload = _create_maintenance_payload(
|
||||||
equipment_id, scenario_id, "Create maintenance")
|
equipment_id, scenario_id, "Create maintenance"
|
||||||
|
)
|
||||||
|
|
||||||
response = client.post("/api/maintenance/", json=payload)
|
response = client.post("/api/maintenance/", json=payload)
|
||||||
assert response.status_code == 201
|
assert response.status_code == 201
|
||||||
@@ -95,7 +98,8 @@ def test_update_maintenance(client: TestClient):
|
|||||||
}
|
}
|
||||||
|
|
||||||
response = client.put(
|
response = client.put(
|
||||||
f"/api/maintenance/{maintenance_id}", json=update_payload)
|
f"/api/maintenance/{maintenance_id}", json=update_payload
|
||||||
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
updated = response.json()
|
updated = response.json()
|
||||||
assert updated["maintenance_date"] == "2025-11-01"
|
assert updated["maintenance_date"] == "2025-11-01"
|
||||||
@@ -108,7 +112,8 @@ def test_delete_maintenance(client: TestClient):
|
|||||||
create_response = client.post(
|
create_response = client.post(
|
||||||
"/api/maintenance/",
|
"/api/maintenance/",
|
||||||
json=_create_maintenance_payload(
|
json=_create_maintenance_payload(
|
||||||
equipment_id, scenario_id, "Delete maintenance"),
|
equipment_id, scenario_id, "Delete maintenance"
|
||||||
|
),
|
||||||
)
|
)
|
||||||
assert create_response.status_code == 201
|
assert create_response.status_code == 201
|
||||||
maintenance_id = create_response.json()["id"]
|
maintenance_id = create_response.json()["id"]
|
||||||
|
|||||||
@@ -67,7 +67,10 @@ def test_create_and_list_parameter():
|
|||||||
|
|
||||||
def test_create_parameter_for_missing_scenario():
|
def test_create_parameter_for_missing_scenario():
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"scenario_id": 0, "name": "invalid", "value": 1.0}
|
"scenario_id": 0,
|
||||||
|
"name": "invalid",
|
||||||
|
"value": 1.0,
|
||||||
|
}
|
||||||
response = client.post("/api/parameters/", json=payload)
|
response = client.post("/api/parameters/", json=payload)
|
||||||
assert response.status_code == 404
|
assert response.status_code == 404
|
||||||
assert response.json()["detail"] == "Scenario not found"
|
assert response.json()["detail"] == "Scenario not found"
|
||||||
|
|||||||
@@ -42,7 +42,11 @@ def test_list_production_filters_by_scenario(client: TestClient) -> None:
|
|||||||
target_scenario = _create_scenario(client)
|
target_scenario = _create_scenario(client)
|
||||||
other_scenario = _create_scenario(client)
|
other_scenario = _create_scenario(client)
|
||||||
|
|
||||||
for scenario_id, amount in [(target_scenario, 100.0), (target_scenario, 150.0), (other_scenario, 200.0)]:
|
for scenario_id, amount in [
|
||||||
|
(target_scenario, 100.0),
|
||||||
|
(target_scenario, 150.0),
|
||||||
|
(other_scenario, 200.0),
|
||||||
|
]:
|
||||||
response = client.post(
|
response = client.post(
|
||||||
"/api/production/",
|
"/api/production/",
|
||||||
json={
|
json={
|
||||||
@@ -57,8 +61,11 @@ def test_list_production_filters_by_scenario(client: TestClient) -> None:
|
|||||||
|
|
||||||
list_response = client.get("/api/production/")
|
list_response = client.get("/api/production/")
|
||||||
assert list_response.status_code == 200
|
assert list_response.status_code == 200
|
||||||
items = [item for item in list_response.json()
|
items = [
|
||||||
if item["scenario_id"] == target_scenario]
|
item
|
||||||
|
for item in list_response.json()
|
||||||
|
if item["scenario_id"] == target_scenario
|
||||||
|
]
|
||||||
assert {item["amount"] for item in items} == {100.0, 150.0}
|
assert {item["amount"] for item in items} == {100.0, 150.0}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -50,9 +50,11 @@ def test_generate_report_with_values():
|
|||||||
|
|
||||||
|
|
||||||
def test_generate_report_single_value():
|
def test_generate_report_single_value():
|
||||||
report = generate_report([
|
report = generate_report(
|
||||||
{"iteration": 1, "result": 42.0},
|
[
|
||||||
])
|
{"iteration": 1, "result": 42.0},
|
||||||
|
]
|
||||||
|
)
|
||||||
assert report["count"] == 1
|
assert report["count"] == 1
|
||||||
assert report["std_dev"] == 0.0
|
assert report["std_dev"] == 0.0
|
||||||
assert report["variance"] == 0.0
|
assert report["variance"] == 0.0
|
||||||
@@ -105,8 +107,10 @@ def test_reporting_endpoint_success(client: TestClient):
|
|||||||
validation_error_cases: List[tuple[List[Any], str]] = [
|
validation_error_cases: List[tuple[List[Any], str]] = [
|
||||||
(["not-a-dict"], "Entry at index 0 must be an object"),
|
(["not-a-dict"], "Entry at index 0 must be an object"),
|
||||||
([{"iteration": 1}], "Entry at index 0 must include numeric 'result'"),
|
([{"iteration": 1}], "Entry at index 0 must include numeric 'result'"),
|
||||||
([{"iteration": 1, "result": "bad"}],
|
(
|
||||||
"Entry at index 0 must include numeric 'result'"),
|
[{"iteration": 1, "result": "bad"}],
|
||||||
|
"Entry at index 0 must include numeric 'result'",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ def test_parameter_create_missing_scenario_returns_404(
|
|||||||
|
|
||||||
@pytest.mark.usefixtures("invalid_request_payloads")
|
@pytest.mark.usefixtures("invalid_request_payloads")
|
||||||
def test_parameter_create_invalid_distribution_is_422(
|
def test_parameter_create_invalid_distribution_is_422(
|
||||||
api_client: TestClient
|
api_client: TestClient,
|
||||||
) -> None:
|
) -> None:
|
||||||
response = api_client.post(
|
response = api_client.post(
|
||||||
"/api/parameters/",
|
"/api/parameters/",
|
||||||
@@ -90,6 +90,5 @@ def test_maintenance_negative_cost_rejected_by_schema(
|
|||||||
payload = invalid_request_payloads["maintenance_negative_cost"]
|
payload = invalid_request_payloads["maintenance_negative_cost"]
|
||||||
response = api_client.post("/api/maintenance/", json=payload)
|
response = api_client.post("/api/maintenance/", json=payload)
|
||||||
assert response.status_code == 422
|
assert response.status_code == 422
|
||||||
error_locations = [tuple(item["loc"])
|
error_locations = [tuple(item["loc"]) for item in response.json()["detail"]]
|
||||||
for item in response.json()["detail"]]
|
|
||||||
assert ("body", "cost") in error_locations
|
assert ("body", "cost") in error_locations
|
||||||
|
|||||||
53
tests/unit/test_settings_routes.py
Normal file
53
tests/unit/test_settings_routes.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from services import settings as settings_service
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures("db_session")
|
||||||
|
def test_read_css_settings_reflects_env_overrides(
|
||||||
|
api_client: TestClient, monkeypatch: pytest.MonkeyPatch
|
||||||
|
) -> None:
|
||||||
|
env_var = settings_service.css_key_to_env_var("--color-background")
|
||||||
|
monkeypatch.setenv(env_var, "#123456")
|
||||||
|
|
||||||
|
response = api_client.get("/api/settings/css")
|
||||||
|
assert response.status_code == 200
|
||||||
|
body = response.json()
|
||||||
|
|
||||||
|
assert body["variables"]["--color-background"] == "#123456"
|
||||||
|
assert body["env_overrides"]["--color-background"] == "#123456"
|
||||||
|
assert any(
|
||||||
|
source["env_var"] == env_var and source["value"] == "#123456"
|
||||||
|
for source in body["env_sources"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures("db_session")
|
||||||
|
def test_update_css_settings_persists_changes(
|
||||||
|
api_client: TestClient, db_session: Session
|
||||||
|
) -> None:
|
||||||
|
payload = {"variables": {"--color-primary": "#112233"}}
|
||||||
|
|
||||||
|
response = api_client.put("/api/settings/css", json=payload)
|
||||||
|
assert response.status_code == 200
|
||||||
|
body = response.json()
|
||||||
|
|
||||||
|
assert body["variables"]["--color-primary"] == "#112233"
|
||||||
|
|
||||||
|
persisted = settings_service.get_css_color_settings(db_session)
|
||||||
|
assert persisted["--color-primary"] == "#112233"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures("db_session")
|
||||||
|
def test_update_css_settings_invalid_value_returns_422(
|
||||||
|
api_client: TestClient,
|
||||||
|
) -> None:
|
||||||
|
response = api_client.put(
|
||||||
|
"/api/settings/css",
|
||||||
|
json={"variables": {"--color-primary": "not-a-color"}},
|
||||||
|
)
|
||||||
|
assert response.status_code == 422
|
||||||
|
body = response.json()
|
||||||
|
assert "color" in body["detail"].lower()
|
||||||
149
tests/unit/test_settings_service.py
Normal file
149
tests/unit/test_settings_service.py
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
from types import SimpleNamespace
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from models.application_setting import ApplicationSetting
|
||||||
|
from services import settings as settings_service
|
||||||
|
from services.settings import CSS_COLOR_DEFAULTS
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="clean_env")
|
||||||
|
def fixture_clean_env(monkeypatch: pytest.MonkeyPatch) -> Dict[str, str]:
|
||||||
|
"""Provide an isolated environment mapping for tests."""
|
||||||
|
|
||||||
|
env: Dict[str, str] = {}
|
||||||
|
monkeypatch.setattr(settings_service, "os", SimpleNamespace(environ=env))
|
||||||
|
return env
|
||||||
|
|
||||||
|
|
||||||
|
def test_css_key_to_env_var_formatting():
|
||||||
|
assert (
|
||||||
|
settings_service.css_key_to_env_var("--color-background")
|
||||||
|
== "CALMINER_THEME_COLOR_BACKGROUND"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
settings_service.css_key_to_env_var("--color-primary-stronger")
|
||||||
|
== "CALMINER_THEME_COLOR_PRIMARY_STRONGER"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"env_key,env_value",
|
||||||
|
[
|
||||||
|
("--color-background", "#ffffff"),
|
||||||
|
("--color-primary", "rgb(10, 20, 30)"),
|
||||||
|
("--color-accent", "rgba(1,2,3,0.5)"),
|
||||||
|
("--color-text-secondary", "hsla(210, 40%, 40%, 1)"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_read_css_color_env_overrides_valid_values(
|
||||||
|
clean_env, env_key, env_value
|
||||||
|
):
|
||||||
|
env_var = settings_service.css_key_to_env_var(env_key)
|
||||||
|
clean_env[env_var] = env_value
|
||||||
|
|
||||||
|
overrides = settings_service.read_css_color_env_overrides(clean_env)
|
||||||
|
assert overrides[env_key] == env_value
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"invalid_value",
|
||||||
|
[
|
||||||
|
"", # empty
|
||||||
|
"not-a-color", # arbitrary string
|
||||||
|
"#12", # short hex
|
||||||
|
"rgb(1,2)", # malformed rgb
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_read_css_color_env_overrides_invalid_values_raise(
|
||||||
|
clean_env, invalid_value
|
||||||
|
):
|
||||||
|
env_var = settings_service.css_key_to_env_var("--color-background")
|
||||||
|
clean_env[env_var] = invalid_value
|
||||||
|
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
settings_service.read_css_color_env_overrides(clean_env)
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_css_color_env_overrides_ignores_missing(clean_env):
|
||||||
|
overrides = settings_service.read_css_color_env_overrides(clean_env)
|
||||||
|
assert overrides == {}
|
||||||
|
|
||||||
|
|
||||||
|
def test_list_css_env_override_rows_returns_structured_data(clean_env):
|
||||||
|
clean_env[settings_service.css_key_to_env_var("--color-primary")] = (
|
||||||
|
"#123456"
|
||||||
|
)
|
||||||
|
rows = settings_service.list_css_env_override_rows(clean_env)
|
||||||
|
assert rows == [
|
||||||
|
{
|
||||||
|
"css_key": "--color-primary",
|
||||||
|
"env_var": settings_service.css_key_to_env_var("--color-primary"),
|
||||||
|
"value": "#123456",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_normalize_color_value_strips_and_validates():
|
||||||
|
assert settings_service._normalize_color_value(" #abcdef ") == "#abcdef"
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
settings_service._normalize_color_value(123) # type: ignore[arg-type]
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
settings_service._normalize_color_value(" ")
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
settings_service._normalize_color_value("#12")
|
||||||
|
|
||||||
|
|
||||||
|
def test_ensure_css_color_settings_creates_defaults(db_session: Session):
|
||||||
|
settings_service.ensure_css_color_settings(db_session)
|
||||||
|
|
||||||
|
stored = {
|
||||||
|
record.key: record.value
|
||||||
|
for record in db_session.query(ApplicationSetting).all()
|
||||||
|
}
|
||||||
|
assert set(stored.keys()) == set(CSS_COLOR_DEFAULTS.keys())
|
||||||
|
assert stored == CSS_COLOR_DEFAULTS
|
||||||
|
|
||||||
|
|
||||||
|
def test_update_css_color_settings_persists_changes(db_session: Session):
|
||||||
|
settings_service.ensure_css_color_settings(db_session)
|
||||||
|
|
||||||
|
updated = settings_service.update_css_color_settings(
|
||||||
|
db_session,
|
||||||
|
{"--color-background": "#000000", "--color-accent": "#abcdef"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert updated["--color-background"] == "#000000"
|
||||||
|
assert updated["--color-accent"] == "#abcdef"
|
||||||
|
|
||||||
|
stored = {
|
||||||
|
record.key: record.value
|
||||||
|
for record in db_session.query(ApplicationSetting).all()
|
||||||
|
}
|
||||||
|
assert stored["--color-background"] == "#000000"
|
||||||
|
assert stored["--color-accent"] == "#abcdef"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_css_color_settings_respects_env_overrides(
|
||||||
|
db_session: Session, clean_env: Dict[str, str]
|
||||||
|
):
|
||||||
|
settings_service.ensure_css_color_settings(db_session)
|
||||||
|
override_value = "#112233"
|
||||||
|
clean_env[settings_service.css_key_to_env_var("--color-background")] = (
|
||||||
|
override_value
|
||||||
|
)
|
||||||
|
|
||||||
|
values = settings_service.get_css_color_settings(db_session)
|
||||||
|
|
||||||
|
assert values["--color-background"] == override_value
|
||||||
|
|
||||||
|
db_value = (
|
||||||
|
db_session.query(ApplicationSetting)
|
||||||
|
.filter_by(key="--color-background")
|
||||||
|
.one()
|
||||||
|
.value
|
||||||
|
)
|
||||||
|
assert db_value != override_value
|
||||||
@@ -31,10 +31,13 @@ def setup_instance(mock_config: DatabaseConfig) -> DatabaseSetup:
|
|||||||
return DatabaseSetup(mock_config, dry_run=True)
|
return DatabaseSetup(mock_config, dry_run=True)
|
||||||
|
|
||||||
|
|
||||||
def test_seed_baseline_data_dry_run_skips_verification(setup_instance: DatabaseSetup) -> None:
|
def test_seed_baseline_data_dry_run_skips_verification(
|
||||||
with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object(
|
setup_instance: DatabaseSetup,
|
||||||
setup_instance, "_verify_seeded_data"
|
) -> None:
|
||||||
) as verify_mock:
|
with (
|
||||||
|
mock.patch("scripts.seed_data.run_with_namespace") as seed_run,
|
||||||
|
mock.patch.object(setup_instance, "_verify_seeded_data") as verify_mock,
|
||||||
|
):
|
||||||
setup_instance.seed_baseline_data(dry_run=True)
|
setup_instance.seed_baseline_data(dry_run=True)
|
||||||
|
|
||||||
seed_run.assert_called_once()
|
seed_run.assert_called_once()
|
||||||
@@ -47,13 +50,16 @@ def test_seed_baseline_data_dry_run_skips_verification(setup_instance: DatabaseS
|
|||||||
verify_mock.assert_not_called()
|
verify_mock.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
def test_seed_baseline_data_invokes_verification(setup_instance: DatabaseSetup) -> None:
|
def test_seed_baseline_data_invokes_verification(
|
||||||
|
setup_instance: DatabaseSetup,
|
||||||
|
) -> None:
|
||||||
expected_currencies = {code for code, *_ in seed_data.CURRENCY_SEEDS}
|
expected_currencies = {code for code, *_ in seed_data.CURRENCY_SEEDS}
|
||||||
expected_units = {code for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS}
|
expected_units = {code for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS}
|
||||||
|
|
||||||
with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object(
|
with (
|
||||||
setup_instance, "_verify_seeded_data"
|
mock.patch("scripts.seed_data.run_with_namespace") as seed_run,
|
||||||
) as verify_mock:
|
mock.patch.object(setup_instance, "_verify_seeded_data") as verify_mock,
|
||||||
|
):
|
||||||
setup_instance.seed_baseline_data(dry_run=False)
|
setup_instance.seed_baseline_data(dry_run=False)
|
||||||
|
|
||||||
seed_run.assert_called_once()
|
seed_run.assert_called_once()
|
||||||
@@ -67,7 +73,9 @@ def test_seed_baseline_data_invokes_verification(setup_instance: DatabaseSetup)
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_run_migrations_applies_baseline_when_missing(mock_config: DatabaseConfig, tmp_path) -> None:
|
def test_run_migrations_applies_baseline_when_missing(
|
||||||
|
mock_config: DatabaseConfig, tmp_path
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
|
|
||||||
baseline = tmp_path / "000_base.sql"
|
baseline = tmp_path / "000_base.sql"
|
||||||
@@ -88,15 +96,24 @@ def test_run_migrations_applies_baseline_when_missing(mock_config: DatabaseConfi
|
|||||||
cursor_context.__enter__.return_value = cursor_mock
|
cursor_context.__enter__.return_value = cursor_mock
|
||||||
connection_mock.cursor.return_value = cursor_context
|
connection_mock.cursor.return_value = cursor_context
|
||||||
|
|
||||||
with mock.patch.object(
|
with (
|
||||||
setup_instance, "_application_connection", return_value=connection_mock
|
mock.patch.object(
|
||||||
), mock.patch.object(
|
setup_instance,
|
||||||
setup_instance, "_migrations_table_exists", return_value=True
|
"_application_connection",
|
||||||
), mock.patch.object(
|
return_value=connection_mock,
|
||||||
setup_instance, "_fetch_applied_migrations", return_value=set()
|
),
|
||||||
), mock.patch.object(
|
mock.patch.object(
|
||||||
setup_instance, "_apply_migration_file", side_effect=capture_migration
|
setup_instance, "_migrations_table_exists", return_value=True
|
||||||
) as apply_mock:
|
),
|
||||||
|
mock.patch.object(
|
||||||
|
setup_instance, "_fetch_applied_migrations", return_value=set()
|
||||||
|
),
|
||||||
|
mock.patch.object(
|
||||||
|
setup_instance,
|
||||||
|
"_apply_migration_file",
|
||||||
|
side_effect=capture_migration,
|
||||||
|
) as apply_mock,
|
||||||
|
):
|
||||||
setup_instance.run_migrations(tmp_path)
|
setup_instance.run_migrations(tmp_path)
|
||||||
|
|
||||||
assert apply_mock.call_count == 1
|
assert apply_mock.call_count == 1
|
||||||
@@ -121,17 +138,24 @@ def test_run_migrations_noop_when_all_files_already_applied(
|
|||||||
|
|
||||||
connection_mock, cursor_mock = _connection_with_cursor()
|
connection_mock, cursor_mock = _connection_with_cursor()
|
||||||
|
|
||||||
with mock.patch.object(
|
with (
|
||||||
setup_instance, "_application_connection", return_value=connection_mock
|
mock.patch.object(
|
||||||
), mock.patch.object(
|
setup_instance,
|
||||||
setup_instance, "_migrations_table_exists", return_value=True
|
"_application_connection",
|
||||||
), mock.patch.object(
|
return_value=connection_mock,
|
||||||
setup_instance,
|
),
|
||||||
"_fetch_applied_migrations",
|
mock.patch.object(
|
||||||
return_value={"000_base.sql", "20251022_add_other.sql"},
|
setup_instance, "_migrations_table_exists", return_value=True
|
||||||
), mock.patch.object(
|
),
|
||||||
setup_instance, "_apply_migration_file"
|
mock.patch.object(
|
||||||
) as apply_mock:
|
setup_instance,
|
||||||
|
"_fetch_applied_migrations",
|
||||||
|
return_value={"000_base.sql", "20251022_add_other.sql"},
|
||||||
|
),
|
||||||
|
mock.patch.object(
|
||||||
|
setup_instance, "_apply_migration_file"
|
||||||
|
) as apply_mock,
|
||||||
|
):
|
||||||
setup_instance.run_migrations(tmp_path)
|
setup_instance.run_migrations(tmp_path)
|
||||||
|
|
||||||
apply_mock.assert_not_called()
|
apply_mock.assert_not_called()
|
||||||
@@ -148,12 +172,16 @@ def _connection_with_cursor() -> tuple[mock.MagicMock, mock.MagicMock]:
|
|||||||
return connection_mock, cursor_mock
|
return connection_mock, cursor_mock
|
||||||
|
|
||||||
|
|
||||||
def test_verify_seeded_data_raises_when_currency_missing(mock_config: DatabaseConfig) -> None:
|
def test_verify_seeded_data_raises_when_currency_missing(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock, cursor_mock = _connection_with_cursor()
|
connection_mock, cursor_mock = _connection_with_cursor()
|
||||||
cursor_mock.fetchall.return_value = [("USD", True)]
|
cursor_mock.fetchall.return_value = [("USD", True)]
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
|
with mock.patch.object(
|
||||||
|
setup_instance, "_application_connection", return_value=connection_mock
|
||||||
|
):
|
||||||
with pytest.raises(RuntimeError) as exc:
|
with pytest.raises(RuntimeError) as exc:
|
||||||
setup_instance._verify_seeded_data(
|
setup_instance._verify_seeded_data(
|
||||||
expected_currency_codes={"USD", "EUR"},
|
expected_currency_codes={"USD", "EUR"},
|
||||||
@@ -163,12 +191,16 @@ def test_verify_seeded_data_raises_when_currency_missing(mock_config: DatabaseCo
|
|||||||
assert "EUR" in str(exc.value)
|
assert "EUR" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
def test_verify_seeded_data_raises_when_default_currency_inactive(mock_config: DatabaseConfig) -> None:
|
def test_verify_seeded_data_raises_when_default_currency_inactive(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock, cursor_mock = _connection_with_cursor()
|
connection_mock, cursor_mock = _connection_with_cursor()
|
||||||
cursor_mock.fetchall.return_value = [("USD", False)]
|
cursor_mock.fetchall.return_value = [("USD", False)]
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
|
with mock.patch.object(
|
||||||
|
setup_instance, "_application_connection", return_value=connection_mock
|
||||||
|
):
|
||||||
with pytest.raises(RuntimeError) as exc:
|
with pytest.raises(RuntimeError) as exc:
|
||||||
setup_instance._verify_seeded_data(
|
setup_instance._verify_seeded_data(
|
||||||
expected_currency_codes={"USD"},
|
expected_currency_codes={"USD"},
|
||||||
@@ -178,12 +210,16 @@ def test_verify_seeded_data_raises_when_default_currency_inactive(mock_config: D
|
|||||||
assert "inactive" in str(exc.value)
|
assert "inactive" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
def test_verify_seeded_data_raises_when_units_missing(mock_config: DatabaseConfig) -> None:
|
def test_verify_seeded_data_raises_when_units_missing(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock, cursor_mock = _connection_with_cursor()
|
connection_mock, cursor_mock = _connection_with_cursor()
|
||||||
cursor_mock.fetchall.return_value = [("tonnes", True)]
|
cursor_mock.fetchall.return_value = [("tonnes", True)]
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
|
with mock.patch.object(
|
||||||
|
setup_instance, "_application_connection", return_value=connection_mock
|
||||||
|
):
|
||||||
with pytest.raises(RuntimeError) as exc:
|
with pytest.raises(RuntimeError) as exc:
|
||||||
setup_instance._verify_seeded_data(
|
setup_instance._verify_seeded_data(
|
||||||
expected_currency_codes=set(),
|
expected_currency_codes=set(),
|
||||||
@@ -193,12 +229,18 @@ def test_verify_seeded_data_raises_when_units_missing(mock_config: DatabaseConfi
|
|||||||
assert "liters" in str(exc.value)
|
assert "liters" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
def test_verify_seeded_data_raises_when_measurement_table_missing(mock_config: DatabaseConfig) -> None:
|
def test_verify_seeded_data_raises_when_measurement_table_missing(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock, cursor_mock = _connection_with_cursor()
|
connection_mock, cursor_mock = _connection_with_cursor()
|
||||||
cursor_mock.execute.side_effect = psycopg_errors.UndefinedTable("relation does not exist")
|
cursor_mock.execute.side_effect = psycopg_errors.UndefinedTable(
|
||||||
|
"relation does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
|
with mock.patch.object(
|
||||||
|
setup_instance, "_application_connection", return_value=connection_mock
|
||||||
|
):
|
||||||
with pytest.raises(RuntimeError) as exc:
|
with pytest.raises(RuntimeError) as exc:
|
||||||
setup_instance._verify_seeded_data(
|
setup_instance._verify_seeded_data(
|
||||||
expected_currency_codes=set(),
|
expected_currency_codes=set(),
|
||||||
@@ -226,9 +268,14 @@ def test_seed_baseline_data_rerun_uses_existing_records(
|
|||||||
unit_rows,
|
unit_rows,
|
||||||
]
|
]
|
||||||
|
|
||||||
with mock.patch.object(
|
with (
|
||||||
setup_instance, "_application_connection", return_value=connection_mock
|
mock.patch.object(
|
||||||
), mock.patch("scripts.seed_data.run_with_namespace") as seed_run:
|
setup_instance,
|
||||||
|
"_application_connection",
|
||||||
|
return_value=connection_mock,
|
||||||
|
),
|
||||||
|
mock.patch("scripts.seed_data.run_with_namespace") as seed_run,
|
||||||
|
):
|
||||||
setup_instance.seed_baseline_data(dry_run=False)
|
setup_instance.seed_baseline_data(dry_run=False)
|
||||||
setup_instance.seed_baseline_data(dry_run=False)
|
setup_instance.seed_baseline_data(dry_run=False)
|
||||||
|
|
||||||
@@ -240,7 +287,9 @@ def test_seed_baseline_data_rerun_uses_existing_records(
|
|||||||
assert cursor_mock.execute.call_count == 4
|
assert cursor_mock.execute.call_count == 4
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_database_raises_with_context(mock_config: DatabaseConfig) -> None:
|
def test_ensure_database_raises_with_context(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock = mock.MagicMock()
|
connection_mock = mock.MagicMock()
|
||||||
cursor_mock = mock.MagicMock()
|
cursor_mock = mock.MagicMock()
|
||||||
@@ -248,14 +297,18 @@ def test_ensure_database_raises_with_context(mock_config: DatabaseConfig) -> Non
|
|||||||
cursor_mock.execute.side_effect = [None, psycopg2.Error("create_fail")]
|
cursor_mock.execute.side_effect = [None, psycopg2.Error("create_fail")]
|
||||||
connection_mock.cursor.return_value = cursor_mock
|
connection_mock.cursor.return_value = cursor_mock
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock):
|
with mock.patch.object(
|
||||||
|
setup_instance, "_admin_connection", return_value=connection_mock
|
||||||
|
):
|
||||||
with pytest.raises(RuntimeError) as exc:
|
with pytest.raises(RuntimeError) as exc:
|
||||||
setup_instance.ensure_database()
|
setup_instance.ensure_database()
|
||||||
|
|
||||||
assert "Failed to create database" in str(exc.value)
|
assert "Failed to create database" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_role_raises_with_context_during_creation(mock_config: DatabaseConfig) -> None:
|
def test_ensure_role_raises_with_context_during_creation(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
|
|
||||||
admin_conn, admin_cursor = _connection_with_cursor()
|
admin_conn, admin_cursor = _connection_with_cursor()
|
||||||
@@ -295,7 +348,9 @@ def test_ensure_role_raises_with_context_during_privilege_grants(
|
|||||||
assert "Failed to grant privileges" in str(exc.value)
|
assert "Failed to grant privileges" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_database_dry_run_skips_creation(mock_config: DatabaseConfig) -> None:
|
def test_ensure_database_dry_run_skips_creation(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
||||||
|
|
||||||
connection_mock = mock.MagicMock()
|
connection_mock = mock.MagicMock()
|
||||||
@@ -303,45 +358,59 @@ def test_ensure_database_dry_run_skips_creation(mock_config: DatabaseConfig) ->
|
|||||||
cursor_mock.fetchone.return_value = None
|
cursor_mock.fetchone.return_value = None
|
||||||
connection_mock.cursor.return_value = cursor_mock
|
connection_mock.cursor.return_value = cursor_mock
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch(
|
with (
|
||||||
"scripts.setup_database.logger"
|
mock.patch.object(
|
||||||
) as logger_mock:
|
setup_instance, "_admin_connection", return_value=connection_mock
|
||||||
|
),
|
||||||
|
mock.patch("scripts.setup_database.logger") as logger_mock,
|
||||||
|
):
|
||||||
setup_instance.ensure_database()
|
setup_instance.ensure_database()
|
||||||
|
|
||||||
# expect only existence check, no create attempt
|
# expect only existence check, no create attempt
|
||||||
cursor_mock.execute.assert_called_once()
|
cursor_mock.execute.assert_called_once()
|
||||||
logger_mock.info.assert_any_call(
|
logger_mock.info.assert_any_call(
|
||||||
"Dry run: would create database '%s'. Run without --dry-run to proceed.", mock_config.database
|
"Dry run: would create database '%s'. Run without --dry-run to proceed.",
|
||||||
|
mock_config.database,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_role_dry_run_skips_creation_and_grants(mock_config: DatabaseConfig) -> None:
|
def test_ensure_role_dry_run_skips_creation_and_grants(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
||||||
|
|
||||||
admin_conn, admin_cursor = _connection_with_cursor()
|
admin_conn, admin_cursor = _connection_with_cursor()
|
||||||
admin_cursor.fetchone.return_value = None
|
admin_cursor.fetchone.return_value = None
|
||||||
|
|
||||||
with mock.patch.object(
|
with (
|
||||||
setup_instance,
|
mock.patch.object(
|
||||||
"_admin_connection",
|
setup_instance,
|
||||||
side_effect=[admin_conn],
|
"_admin_connection",
|
||||||
) as conn_mock, mock.patch("scripts.setup_database.logger") as logger_mock:
|
side_effect=[admin_conn],
|
||||||
|
) as conn_mock,
|
||||||
|
mock.patch("scripts.setup_database.logger") as logger_mock,
|
||||||
|
):
|
||||||
setup_instance.ensure_role()
|
setup_instance.ensure_role()
|
||||||
|
|
||||||
assert conn_mock.call_count == 1
|
assert conn_mock.call_count == 1
|
||||||
admin_cursor.execute.assert_called_once()
|
admin_cursor.execute.assert_called_once()
|
||||||
logger_mock.info.assert_any_call(
|
logger_mock.info.assert_any_call(
|
||||||
"Dry run: would create role '%s'. Run without --dry-run to apply.", mock_config.user
|
"Dry run: would create role '%s'. Run without --dry-run to apply.",
|
||||||
|
mock_config.user,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_register_rollback_skipped_when_dry_run(mock_config: DatabaseConfig) -> None:
|
def test_register_rollback_skipped_when_dry_run(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
setup_instance = DatabaseSetup(mock_config, dry_run=True)
|
||||||
setup_instance._register_rollback("noop", lambda: None)
|
setup_instance._register_rollback("noop", lambda: None)
|
||||||
assert setup_instance._rollback_actions == []
|
assert setup_instance._rollback_actions == []
|
||||||
|
|
||||||
|
|
||||||
def test_execute_rollbacks_runs_in_reverse_order(mock_config: DatabaseConfig) -> None:
|
def test_execute_rollbacks_runs_in_reverse_order(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
|
|
||||||
calls: list[str] = []
|
calls: list[str] = []
|
||||||
@@ -362,16 +431,24 @@ def test_execute_rollbacks_runs_in_reverse_order(mock_config: DatabaseConfig) ->
|
|||||||
assert setup_instance._rollback_actions == []
|
assert setup_instance._rollback_actions == []
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_database_registers_rollback_action(mock_config: DatabaseConfig) -> None:
|
def test_ensure_database_registers_rollback_action(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
connection_mock = mock.MagicMock()
|
connection_mock = mock.MagicMock()
|
||||||
cursor_mock = mock.MagicMock()
|
cursor_mock = mock.MagicMock()
|
||||||
cursor_mock.fetchone.return_value = None
|
cursor_mock.fetchone.return_value = None
|
||||||
connection_mock.cursor.return_value = cursor_mock
|
connection_mock.cursor.return_value = cursor_mock
|
||||||
|
|
||||||
with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch.object(
|
with (
|
||||||
setup_instance, "_register_rollback"
|
mock.patch.object(
|
||||||
) as register_mock, mock.patch.object(setup_instance, "_drop_database") as drop_mock:
|
setup_instance, "_admin_connection", return_value=connection_mock
|
||||||
|
),
|
||||||
|
mock.patch.object(
|
||||||
|
setup_instance, "_register_rollback"
|
||||||
|
) as register_mock,
|
||||||
|
mock.patch.object(setup_instance, "_drop_database") as drop_mock,
|
||||||
|
):
|
||||||
setup_instance.ensure_database()
|
setup_instance.ensure_database()
|
||||||
register_mock.assert_called_once()
|
register_mock.assert_called_once()
|
||||||
label, action = register_mock.call_args[0]
|
label, action = register_mock.call_args[0]
|
||||||
@@ -380,24 +457,29 @@ def test_ensure_database_registers_rollback_action(mock_config: DatabaseConfig)
|
|||||||
drop_mock.assert_called_once_with(mock_config.database)
|
drop_mock.assert_called_once_with(mock_config.database)
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_role_registers_rollback_actions(mock_config: DatabaseConfig) -> None:
|
def test_ensure_role_registers_rollback_actions(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
setup_instance = DatabaseSetup(mock_config, dry_run=False)
|
||||||
|
|
||||||
admin_conn, admin_cursor = _connection_with_cursor()
|
admin_conn, admin_cursor = _connection_with_cursor()
|
||||||
admin_cursor.fetchone.return_value = None
|
admin_cursor.fetchone.return_value = None
|
||||||
privilege_conn, privilege_cursor = _connection_with_cursor()
|
privilege_conn, privilege_cursor = _connection_with_cursor()
|
||||||
|
|
||||||
with mock.patch.object(
|
with (
|
||||||
setup_instance,
|
mock.patch.object(
|
||||||
"_admin_connection",
|
setup_instance,
|
||||||
side_effect=[admin_conn, privilege_conn],
|
"_admin_connection",
|
||||||
), mock.patch.object(
|
side_effect=[admin_conn, privilege_conn],
|
||||||
setup_instance, "_register_rollback"
|
),
|
||||||
) as register_mock, mock.patch.object(
|
mock.patch.object(
|
||||||
setup_instance, "_drop_role"
|
setup_instance, "_register_rollback"
|
||||||
) as drop_mock, mock.patch.object(
|
) as register_mock,
|
||||||
setup_instance, "_revoke_role_privileges"
|
mock.patch.object(setup_instance, "_drop_role") as drop_mock,
|
||||||
) as revoke_mock:
|
mock.patch.object(
|
||||||
|
setup_instance, "_revoke_role_privileges"
|
||||||
|
) as revoke_mock,
|
||||||
|
):
|
||||||
setup_instance.ensure_role()
|
setup_instance.ensure_role()
|
||||||
assert register_mock.call_count == 2
|
assert register_mock.call_count == 2
|
||||||
drop_label, drop_action = register_mock.call_args_list[0][0]
|
drop_label, drop_action = register_mock.call_args_list[0][0]
|
||||||
@@ -413,7 +495,9 @@ def test_ensure_role_registers_rollback_actions(mock_config: DatabaseConfig) ->
|
|||||||
revoke_mock.assert_called_once()
|
revoke_mock.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
def test_main_triggers_rollbacks_on_failure(mock_config: DatabaseConfig) -> None:
|
def test_main_triggers_rollbacks_on_failure(
|
||||||
|
mock_config: DatabaseConfig,
|
||||||
|
) -> None:
|
||||||
args = argparse.Namespace(
|
args = argparse.Namespace(
|
||||||
ensure_database=True,
|
ensure_database=True,
|
||||||
ensure_role=True,
|
ensure_role=True,
|
||||||
@@ -437,11 +521,13 @@ def test_main_triggers_rollbacks_on_failure(mock_config: DatabaseConfig) -> None
|
|||||||
verbose=0,
|
verbose=0,
|
||||||
)
|
)
|
||||||
|
|
||||||
with mock.patch.object(setup_db_module, "parse_args", return_value=args), mock.patch.object(
|
with (
|
||||||
setup_db_module.DatabaseConfig, "from_env", return_value=mock_config
|
mock.patch.object(setup_db_module, "parse_args", return_value=args),
|
||||||
), mock.patch.object(
|
mock.patch.object(
|
||||||
setup_db_module, "DatabaseSetup"
|
setup_db_module.DatabaseConfig, "from_env", return_value=mock_config
|
||||||
) as setup_cls:
|
),
|
||||||
|
mock.patch.object(setup_db_module, "DatabaseSetup") as setup_cls,
|
||||||
|
):
|
||||||
setup_instance = mock.MagicMock()
|
setup_instance = mock.MagicMock()
|
||||||
setup_instance.dry_run = False
|
setup_instance.dry_run = False
|
||||||
setup_instance._rollback_actions = [
|
setup_instance._rollback_actions = [
|
||||||
|
|||||||
@@ -19,7 +19,12 @@ def client(api_client: TestClient) -> TestClient:
|
|||||||
|
|
||||||
def test_run_simulation_function_generates_samples():
|
def test_run_simulation_function_generates_samples():
|
||||||
params: List[Dict[str, Any]] = [
|
params: List[Dict[str, Any]] = [
|
||||||
{"name": "grade", "value": 1.8, "distribution": "normal", "std_dev": 0.2},
|
{
|
||||||
|
"name": "grade",
|
||||||
|
"value": 1.8,
|
||||||
|
"distribution": "normal",
|
||||||
|
"std_dev": 0.2,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "recovery",
|
"name": "recovery",
|
||||||
"value": 0.9,
|
"value": 0.9,
|
||||||
@@ -45,7 +50,10 @@ def test_run_simulation_with_zero_iterations_returns_empty():
|
|||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"parameter_payload,error_message",
|
"parameter_payload,error_message",
|
||||||
[
|
[
|
||||||
({"name": "missing-value"}, "Parameter at index 0 must include 'value'"),
|
(
|
||||||
|
{"name": "missing-value"},
|
||||||
|
"Parameter at index 0 must include 'value'",
|
||||||
|
),
|
||||||
(
|
(
|
||||||
{
|
{
|
||||||
"name": "bad-dist",
|
"name": "bad-dist",
|
||||||
@@ -110,7 +118,8 @@ def test_run_simulation_triangular_sampling_path():
|
|||||||
span = 10.0 * DEFAULT_UNIFORM_SPAN_RATIO
|
span = 10.0 * DEFAULT_UNIFORM_SPAN_RATIO
|
||||||
rng = Random(seed)
|
rng = Random(seed)
|
||||||
expected_samples = [
|
expected_samples = [
|
||||||
rng.triangular(10.0 - span, 10.0 + span, 10.0) for _ in range(iterations)
|
rng.triangular(10.0 - span, 10.0 + span, 10.0)
|
||||||
|
for _ in range(iterations)
|
||||||
]
|
]
|
||||||
actual_samples = [entry["result"] for entry in results]
|
actual_samples = [entry["result"] for entry in results]
|
||||||
for actual, expected in zip(actual_samples, expected_samples):
|
for actual, expected in zip(actual_samples, expected_samples):
|
||||||
@@ -156,9 +165,7 @@ def test_simulation_endpoint_no_params(client: TestClient):
|
|||||||
assert resp.json()["detail"] == "No parameters provided"
|
assert resp.json()["detail"] == "No parameters provided"
|
||||||
|
|
||||||
|
|
||||||
def test_simulation_endpoint_success(
|
def test_simulation_endpoint_success(client: TestClient, db_session: Session):
|
||||||
client: TestClient, db_session: Session
|
|
||||||
):
|
|
||||||
scenario_payload: Dict[str, Any] = {
|
scenario_payload: Dict[str, Any] = {
|
||||||
"name": f"SimScenario-{uuid4()}",
|
"name": f"SimScenario-{uuid4()}",
|
||||||
"description": "Simulation test",
|
"description": "Simulation test",
|
||||||
@@ -168,7 +175,12 @@ def test_simulation_endpoint_success(
|
|||||||
scenario_id = scenario_resp.json()["id"]
|
scenario_id = scenario_resp.json()["id"]
|
||||||
|
|
||||||
params: List[Dict[str, Any]] = [
|
params: List[Dict[str, Any]] = [
|
||||||
{"name": "param1", "value": 2.5, "distribution": "normal", "std_dev": 0.5}
|
{
|
||||||
|
"name": "param1",
|
||||||
|
"value": 2.5,
|
||||||
|
"distribution": "normal",
|
||||||
|
"std_dev": 0.5,
|
||||||
|
}
|
||||||
]
|
]
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"scenario_id": scenario_id,
|
"scenario_id": scenario_id,
|
||||||
|
|||||||
63
tests/unit/test_theme_settings.py
Normal file
63
tests/unit/test_theme_settings.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import pytest
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
|
||||||
|
from main import app
|
||||||
|
from models.theme_setting import ThemeSetting
|
||||||
|
from services.settings import save_theme_settings, get_theme_settings
|
||||||
|
|
||||||
|
|
||||||
|
client = TestClient(app)
|
||||||
|
|
||||||
|
|
||||||
|
def test_save_theme_settings(db_session: Session):
|
||||||
|
theme_data = {
|
||||||
|
"theme_name": "dark",
|
||||||
|
"primary_color": "#000000",
|
||||||
|
"secondary_color": "#333333",
|
||||||
|
"accent_color": "#ff0000",
|
||||||
|
"background_color": "#1a1a1a",
|
||||||
|
"text_color": "#ffffff"
|
||||||
|
}
|
||||||
|
|
||||||
|
saved_setting = save_theme_settings(db_session, theme_data)
|
||||||
|
assert str(saved_setting.theme_name) == "dark"
|
||||||
|
assert str(saved_setting.primary_color) == "#000000"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_theme_settings(db_session: Session):
|
||||||
|
# Create a theme setting first
|
||||||
|
theme_data = {
|
||||||
|
"theme_name": "light",
|
||||||
|
"primary_color": "#ffffff",
|
||||||
|
"secondary_color": "#cccccc",
|
||||||
|
"accent_color": "#0000ff",
|
||||||
|
"background_color": "#f0f0f0",
|
||||||
|
"text_color": "#000000"
|
||||||
|
}
|
||||||
|
save_theme_settings(db_session, theme_data)
|
||||||
|
|
||||||
|
settings = get_theme_settings(db_session)
|
||||||
|
assert settings["theme_name"] == "light"
|
||||||
|
assert settings["primary_color"] == "#ffffff"
|
||||||
|
|
||||||
|
|
||||||
|
def test_theme_settings_api(api_client):
|
||||||
|
# Test API endpoint for saving theme settings
|
||||||
|
theme_data = {
|
||||||
|
"theme_name": "test_theme",
|
||||||
|
"primary_color": "#123456",
|
||||||
|
"secondary_color": "#789abc",
|
||||||
|
"accent_color": "#def012",
|
||||||
|
"background_color": "#345678",
|
||||||
|
"text_color": "#9abcde"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = api_client.post("/api/settings/theme", json=theme_data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["theme"]["theme_name"] == "test_theme"
|
||||||
|
|
||||||
|
# Test API endpoint for getting theme settings
|
||||||
|
response = api_client.get("/api/settings/theme")
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["theme_name"] == "test_theme"
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user