# Compare commits

`97b1c0360b...feat/ci-ov` (62 commits)
| SHA1 |
| --- |
| df1c971354 |
| 3a8aef04b0 |
| 45d746d80a |
| f1bc7f06b9 |
| 82e98efb1b |
| f91349dedd |
| efee50fdc7 |
| e254d50c0c |
| 6eef8424b7 |
| c1f4902cf4 |
| 52450bc487 |
| c3449f1986 |
| f863808940 |
| 37646b571a |
| 22f43bed56 |
| 72cf06a31d |
| b796a053d6 |
| 04d7f202b6 |
| 1f58de448c |
| 807204869f |
| ddb23b1da0 |
| 26e231d63f |
| d98d6ebe83 |
| e881be52b5 |
| cc8efa3eab |
| 29a17595da |
| a0431cb630 |
| f1afcaa78b |
| 36da0609ed |
| 26843104ee |
| eb509e3dd2 |
| 51aa2fa71d |
| e1689c3a31 |
| 99d9ea7770 |
| 2136dbdd44 |
| 3da8a50ac4 |
| a772960390 |
| 89a4f663b5 |
| 50446c4248 |
| c5a9a7c96f |
| 723f6a62b8 |
| dcb08ab1b8 |
| a6a5f630cc |
| b56045ca6a |
| 2f07e6fb75 |
| 1f8a595243 |
| 54137b88d7 |
| 7385bdad3e |
| 7d0c8bfc53 |
| a861efeabf |
| 2f5306b793 |
| 573e255769 |
| 8bb5456864 |
| b1d50a56e0 |
| e37488bcf6 |
| ee0a7a5bf5 |
| ef4fb7dcf0 |
| 7f4cd33b65 |
| 41156a87d1 |
| 3fc6a2a9d3 |
| f3da80885f |
| 300ecebe23 |
```diff
@@ -10,6 +10,8 @@ venv/
 .vscode
 .git
 .gitignore
+.gitea
+.github
 .DS_Store
 dist
 build
@@ -17,5 +19,9 @@ build
 *.sqlite3
 .env
 .env.*
-.Dockerfile
-.dockerignore
+coverage/
+logs/
+backups/
+tests/e2e/artifacts/
+scripts/__pycache__/
+reports/
```
`.gitea/actions/setup-python-env/action.yml` (deleted, 111 lines; path inferred from the `uses: ./.gitea/actions/setup-python-env` reference in the removed `test.yml` below):

```yaml
name: Setup Python Environment
description: Configure Python, proxies, dependencies, and optional database setup for CI jobs.
author: CalMiner Team
inputs:
  python-version:
    description: Python version to install.
    required: false
    default: "3.10"
  install-playwright:
    description: Install Playwright browsers when true.
    required: false
    default: "false"
  install-requirements:
    description: Space-delimited list of requirement files to install.
    required: false
    default: "requirements.txt requirements-test.txt"
  run-db-setup:
    description: Run database wait and setup scripts when true.
    required: false
    default: "true"
  db-dry-run:
    description: Execute setup script dry run before live run when true.
    required: false
    default: "true"
runs:
  using: composite
  steps:
    - name: Set up Python
      uses: actions/setup-python@v5
      with:
        python-version: ${{ inputs.python-version }}
    - name: Configure apt proxy
      shell: bash
      run: |
        set -euo pipefail
        PROXY_HOST="http://apt-cacher:3142"
        if ! curl -fsS --connect-timeout 3 "${PROXY_HOST}" >/dev/null; then
          PROXY_HOST="http://192.168.88.14:3142"
        fi
        echo "Using APT proxy ${PROXY_HOST}"
        {
          echo "http_proxy=${PROXY_HOST}"
          echo "https_proxy=${PROXY_HOST}"
          echo "HTTP_PROXY=${PROXY_HOST}"
          echo "HTTPS_PROXY=${PROXY_HOST}"
        } >> "$GITHUB_ENV"
        sudo tee /etc/apt/apt.conf.d/01proxy >/dev/null <<EOF
        Acquire::http::Proxy "${PROXY_HOST}";
        Acquire::https::Proxy "${PROXY_HOST}";
        EOF
    - name: Install dependencies
      shell: bash
      run: |
        set -euo pipefail
        requirements="${{ inputs.install-requirements }}"
        if [ -n "${requirements}" ]; then
          for requirement in ${requirements}; do
            if [ -f "${requirement}" ]; then
              pip install -r "${requirement}"
            else
              echo "Requirement file ${requirement} not found" >&2
              exit 1
            fi
          done
        fi
    - name: Install Playwright browsers
      if: ${{ inputs.install-playwright == 'true' }}
      shell: bash
      run: |
        set -euo pipefail
        python -m playwright install --with-deps
    - name: Wait for database service
      if: ${{ inputs.run-db-setup == 'true' }}
      shell: bash
      run: |
        set -euo pipefail
        python - <<'PY'
        import os
        import time

        import psycopg2

        dsn = (
            f"dbname={os.environ['DATABASE_SUPERUSER_DB']} "
            f"user={os.environ['DATABASE_SUPERUSER']} "
            f"password={os.environ['DATABASE_SUPERUSER_PASSWORD']} "
            f"host={os.environ['DATABASE_HOST']} "
            f"port={os.environ['DATABASE_PORT']}"
        )

        for attempt in range(30):
            try:
                with psycopg2.connect(dsn):
                    break
            except psycopg2.OperationalError:
                time.sleep(2)
        else:
            raise SystemExit("Postgres service did not become available")
        PY
    - name: Run database setup (dry run)
      if: ${{ inputs.run-db-setup == 'true' && inputs.db-dry-run == 'true' }}
      shell: bash
      run: |
        set -euo pipefail
        python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
    - name: Run database setup
      if: ${{ inputs.run-db-setup == 'true' }}
      shell: bash
      run: |
        set -euo pipefail
        python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
```
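The removed action's database steps can still be reproduced outside CI. A minimal sketch, assuming a Postgres instance on localhost; the connection values below are placeholders, not values from this repository:

```bash
# Placeholder connection values; the action read these from the job environment.
export DATABASE_HOST=localhost DATABASE_PORT=5432
export DATABASE_SUPERUSER=postgres \
       DATABASE_SUPERUSER_PASSWORD=postgres \
       DATABASE_SUPERUSER_DB=postgres

# Mirror the action's two setup steps: dry run first, then the live run.
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema \
    --initialize-schema --run-migrations --seed-data --dry-run -v
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema \
    --initialize-schema --run-migrations --seed-data -v
```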
`.gitea/workflows/build-and-push.yml` (deleted, 74 lines; filename per the removed README bullet below):

```yaml
name: Build and Push Docker Image
on:
  workflow_run:
    workflows:
      - Run Tests
    branches:
      - main
    types:
      - completed

jobs:
  build-and-push:
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    runs-on: ubuntu-latest
    env:
      DEFAULT_BRANCH: main
      REGISTRY_ORG: allucanget
      REGISTRY_IMAGE_NAME: calminer
      REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
      REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
      REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
      WORKFLOW_RUN_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
      WORKFLOW_RUN_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Collect workflow metadata
        id: meta
        shell: bash
        run: |
          ref_name="${GITHUB_REF_NAME:-${GITHUB_REF##*/}}"
          event_name="${GITHUB_EVENT_NAME:-}"
          sha="${GITHUB_SHA:-}"

          if [ -z "$ref_name" ] && [ -n "${WORKFLOW_RUN_HEAD_BRANCH:-}" ]; then
            ref_name="${WORKFLOW_RUN_HEAD_BRANCH}"
          fi

          if [ -z "$sha" ] && [ -n "${WORKFLOW_RUN_HEAD_SHA:-}" ]; then
            sha="${WORKFLOW_RUN_HEAD_SHA}"
          fi

          if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ]; then
            echo "on_default=true" >> "$GITHUB_OUTPUT"
          else
            echo "on_default=false" >> "$GITHUB_OUTPUT"
          fi

          echo "ref_name=$ref_name" >> "$GITHUB_OUTPUT"
          echo "event_name=$event_name" >> "$GITHUB_OUTPUT"
          echo "sha=$sha" >> "$GITHUB_OUTPUT"

      - name: Set up QEMU and Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Gitea registry
        if: ${{ steps.meta.outputs.on_default == 'true' }}
        uses: docker/login-action@v3
        continue-on-error: true
        with:
          registry: ${{ env.REGISTRY_URL }}
          username: ${{ env.REGISTRY_USERNAME }}
          password: ${{ env.REGISTRY_PASSWORD }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: Dockerfile
          push: ${{ steps.meta.outputs.on_default == 'true' && steps.meta.outputs.event_name != 'pull_request' && (env.REGISTRY_URL != '' && env.REGISTRY_USERNAME != '' && env.REGISTRY_PASSWORD != '') }}
          tags: |
            ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_ORG }}/${{ env.REGISTRY_IMAGE_NAME }}:latest
            ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_ORG }}/${{ env.REGISTRY_IMAGE_NAME }}:${{ steps.meta.outputs.sha }}
```
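Outside CI, the same image can be built and pushed by hand using the workflow's registry layout. A sketch; `git.example.com` and the credential variables are placeholders for your Gitea instance's values:

```bash
# Placeholder registry coordinates; the workflow took these from secrets.
REGISTRY_URL=git.example.com
IMAGE="${REGISTRY_URL}/allucanget/calminer"

docker build -t "${IMAGE}:latest" .
echo "${REGISTRY_PASSWORD}" | docker login "${REGISTRY_URL}" -u "${REGISTRY_USERNAME}" --password-stdin
docker push "${IMAGE}:latest"
```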
`.gitea/workflows/ci.yml` (new file, 74 lines):

```yaml
name: CI

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]

jobs:
  test:
    env:
      APT_CACHER_NG: http://192.168.88.14:3142
      DB_DRIVER: postgresql+psycopg2
      DB_HOST: 192.168.88.35
      DB_NAME: calminer_test
      DB_USER: calminer
      DB_PASSWORD: calminer_password
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: ${{ env.DB_USER }}
          POSTGRES_PASSWORD: ${{ env.DB_PASSWORD }}
          POSTGRES_DB: ${{ env.DB_NAME }}
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Update apt-cacher-ng config
        run: |-
          echo 'Acquire::http::Proxy "{{ env.APT_CACHER_NG }}";' | tee /etc/apt/apt.conf.d/01apt-cacher-ng
          apt-get update

      - name: Update system packages
        run: apt-get upgrade -y

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Install Playwright system dependencies
        run: playwright install-deps

      - name: Install Playwright browsers
        run: playwright install

      - name: Run tests
        env:
          DATABASE_DRIVER: ${{ env.DB_DRIVER }}
          DATABASE_HOST: ${{ env.DB_HOST }}
          DATABASE_PORT: 5432
          DATABASE_USER: ${{ env.DB_USER }}
          DATABASE_PASSWORD: ${{ env.DB_PASSWORD }}
          DATABASE_NAME: ${{ env.DB_NAME }}
        run: |
          pytest tests/ --cov=.

      - name: Build Docker image
        run: |
          docker build -t calminer .
```
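The new job's test steps translate to a straightforward local sequence. A sketch, assuming a reachable Postgres with the same credentials the workflow uses (the host below is a placeholder):

```bash
# Same connection settings the workflow exports for pytest.
export DATABASE_DRIVER=postgresql+psycopg2 DATABASE_HOST=localhost DATABASE_PORT=5432
export DATABASE_USER=calminer DATABASE_PASSWORD=calminer_password DATABASE_NAME=calminer_test

python -m pip install --upgrade pip
pip install -r requirements.txt -r requirements-test.txt
playwright install-deps && playwright install   # browser deps for the e2e suite
pytest tests/ --cov=.
```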
`.gitea/workflows/deploy.yml` (deleted, 51 lines; filename per the removed README bullet below):

```yaml
name: Deploy to Server
on:
  workflow_run:
    workflows:
      - Build and Push Docker Image
    branches:
      - main
    types:
      - completed

jobs:
  deploy:
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    runs-on: ubuntu-latest
    env:
      DEFAULT_BRANCH: main
      REGISTRY_ORG: allucanget
      REGISTRY_IMAGE_NAME: calminer
      REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
      REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
      REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
      WORKFLOW_RUN_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
      WORKFLOW_RUN_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
    steps:
      - name: SSH and deploy
        uses: appleboy/ssh-action@master
        with:
          host: ${{ secrets.SSH_HOST }}
          username: ${{ secrets.SSH_USERNAME }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            IMAGE_SHA="${{ env.WORKFLOW_RUN_HEAD_SHA }}"
            IMAGE_PATH="${{ env.REGISTRY_URL }}/${{ env.REGISTRY_ORG }}/${{ env.REGISTRY_IMAGE_NAME }}"

            if [ -z "$IMAGE_SHA" ]; then
              echo "Missing workflow run head SHA; aborting deployment." >&2
              exit 1
            fi

            docker pull "$IMAGE_PATH:$IMAGE_SHA"
            docker stop calminer || true
            docker rm calminer || true
            docker run -d --name calminer -p 8000:8000 \
              -e DATABASE_DRIVER=${{ secrets.DATABASE_DRIVER }} \
              -e DATABASE_HOST=${{ secrets.DATABASE_HOST }} \
              -e DATABASE_PORT=${{ secrets.DATABASE_PORT }} \
              -e DATABASE_USER=${{ secrets.DATABASE_USER }} \
              -e DATABASE_PASSWORD=${{ secrets.DATABASE_PASSWORD }} \
              -e DATABASE_NAME=${{ secrets.DATABASE_NAME }} \
              -e DATABASE_SCHEMA=${{ secrets.DATABASE_SCHEMA }} \
              "$IMAGE_PATH:$IMAGE_SHA"
```
`.gitea/workflows/test.yml` (deleted, 51 lines; filename per the removed README bullet below):

```yaml
name: Run Tests
on: [push]

jobs:
  tests:
    name: ${{ matrix.target }} tests
    runs-on: ubuntu-latest
    env:
      DATABASE_DRIVER: postgresql
      DATABASE_HOST: postgres
      DATABASE_PORT: "5432"
      DATABASE_NAME: calminer_ci
      DATABASE_USER: calminer
      DATABASE_PASSWORD: secret
      DATABASE_SCHEMA: public
      DATABASE_SUPERUSER: calminer
      DATABASE_SUPERUSER_PASSWORD: secret
      DATABASE_SUPERUSER_DB: calminer_ci
      DATABASE_URL: postgresql+psycopg2://calminer:secret@postgres:5432/calminer_ci
    strategy:
      fail-fast: false
      matrix:
        target: [unit, e2e, lint]
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_DB: calminer_ci
          POSTGRES_USER: calminer
          POSTGRES_PASSWORD: secret
        options: >-
          --health-cmd "pg_isready -U calminer -d calminer_ci"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 10
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Prepare Python environment
        uses: ./.gitea/actions/setup-python-env
        with:
          install-playwright: ${{ matrix.target == 'e2e' }}
      - name: Run tests
        run: |
          if [ "${{ matrix.target }}" = "unit" ]; then
            pytest tests/unit
          elif [ "${{ matrix.target }}" = "lint" ]; then
            ruff check .
          else
            pytest tests/e2e
          fi
```
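Each matrix target maps to one local command, which is handy when reproducing a failing leg of the old pipeline:

```bash
pytest tests/unit   # target: unit
ruff check .        # target: lint
pytest tests/e2e    # target: e2e (requires Playwright browsers)
```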
`.gitignore` (5 lines changed):

```diff
@@ -38,6 +38,9 @@ htmlcov/
 # Mypy cache
 .mypy_cache/
 
+# Linting cache
+.ruff_cache/
+
 # Logs
 *.log
 logs/
@@ -46,5 +49,5 @@ logs/
 *.sqlite3
 test*.db
 
-# Docker files
+# Act runner files
 .runner
```
`Dockerfile` (128 lines changed):

```diff
@@ -1,35 +1,111 @@
-# Multi-stage Dockerfile to keep final image small
-FROM python:3.10-slim AS builder
-
-# Install build-time packages and Python dependencies in one layer
-WORKDIR /app
-COPY requirements.txt /app/requirements.txt
-RUN echo 'Acquire::http::Proxy "http://192.168.88.14:3142";' > /etc/apt/apt.conf.d/90proxy
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends build-essential gcc libpq-dev \
-    && python -m pip install --upgrade pip \
-    && pip install --no-cache-dir --prefix=/install -r /app/requirements.txt \
-    && apt-get purge -y --auto-remove build-essential gcc \
-    && rm -rf /var/lib/apt/lists/*
-
-FROM python:3.10-slim
-WORKDIR /app
-
-# Copy installed packages from builder
-COPY --from=builder /install /usr/local
-
-# Assume environment variables for DB config will be set at runtime
-# ENV DATABASE_HOST=your_db_host
-# ENV DATABASE_PORT=your_db_port
-# ENV DATABASE_NAME=your_db_name
-# ENV DATABASE_USER=your_db_user
-# ENV DATABASE_PASSWORD=your_db_password
-
-# Copy application code
-COPY . /app
-
-# Expose service port
-EXPOSE 8000
-
-# Run the FastAPI app with uvicorn
-CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
+# syntax=docker/dockerfile:1.7
+
+ARG PYTHON_VERSION=3.11-slim
+ARG APT_CACHE_URL=http://192.168.88.14:3142
+
+FROM python:${PYTHON_VERSION} AS builder
+ARG APT_CACHE_URL
+
+ENV \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1
+
+WORKDIR /app
+
+COPY requirements.txt ./requirements.txt
+
+RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF'
+set -e
+
+python3 <<'PY'
+import os, socket, urllib.parse
+
+url = os.environ.get('APT_CACHE_URL', '').strip()
+if url:
+    parsed = urllib.parse.urlparse(url)
+    host = parsed.hostname
+    port = parsed.port or (80 if parsed.scheme == 'http' else 443)
+    if host:
+        sock = socket.socket()
+        sock.settimeout(1)
+        try:
+            sock.connect((host, port))
+        except OSError:
+            pass
+        else:
+            with open('/etc/apt/apt.conf.d/01proxy', 'w', encoding='utf-8') as fh:
+                fh.write(f"Acquire::http::Proxy \"{url}\";\n")
+                fh.write(f"Acquire::https::Proxy \"{url}\";\n")
+        finally:
+            sock.close()
+PY
+apt-get update
+apt-get install -y --no-install-recommends build-essential gcc libpq-dev
+pip install --upgrade pip
+pip wheel --no-deps --wheel-dir /wheels -r requirements.txt
+apt-get purge -y --auto-remove build-essential gcc
+rm -rf /var/lib/apt/lists/*
+EOF
+
+FROM python:${PYTHON_VERSION} AS runtime
+ARG APT_CACHE_URL
+
+ENV \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    PATH="/home/appuser/.local/bin:${PATH}"
+
+WORKDIR /app
+
+RUN groupadd --system app && useradd --system --create-home --gid app appuser
+
+RUN /bin/bash <<'EOF'
+set -e
+
+python3 <<'PY'
+import os, socket, urllib.parse
+
+url = os.environ.get('APT_CACHE_URL', '').strip()
+if url:
+    parsed = urllib.parse.urlparse(url)
+    host = parsed.hostname
+    port = parsed.port or (80 if parsed.scheme == 'http' else 443)
+    if host:
+        sock = socket.socket()
+        sock.settimeout(1)
+        try:
+            sock.connect((host, port))
+        except OSError:
+            pass
+        else:
+            with open('/etc/apt/apt.conf.d/01proxy', 'w', encoding='utf-8') as fh:
+                fh.write(f"Acquire::http::Proxy \"{url}\";\n")
+                fh.write(f"Acquire::https::Proxy \"{url}\";\n")
+        finally:
+            sock.close()
+PY
+apt-get update
+apt-get install -y --no-install-recommends libpq5
+rm -rf /var/lib/apt/lists/*
+EOF
+
+COPY --from=builder /wheels /wheels
+COPY --from=builder /app/requirements.txt /tmp/requirements.txt
+
+RUN pip install --upgrade pip \
+    && pip install --no-cache-dir --find-links=/wheels -r /tmp/requirements.txt \
+    && rm -rf /wheels /tmp/requirements.txt
+
+COPY . /app
+
+RUN chown -R appuser:app /app
+
+USER appuser
+
+EXPOSE 8003
+
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8003", "--workers", "4"]
```
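Both build args introduced here can be overridden at build time. A sketch; the cache URL is whatever apt-cacher-ng instance is reachable from the build, and the TCP probe in the Dockerfile silently skips the proxy when it is not:

```bash
docker build \
  --build-arg PYTHON_VERSION=3.11-slim \
  --build-arg APT_CACHE_URL=http://192.168.88.14:3142 \
  -t calminer .

# The runtime stage now listens on 8003 with 4 uvicorn workers.
docker run --rm -p 8003:8003 calminer
```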
`README.md` (97 lines changed):

````diff
@@ -6,24 +6,32 @@ Focuses on ore mining operations and covering parameters such as capital and ope
 
 The system is designed to help mining companies make informed decisions by simulating various scenarios and analyzing potential outcomes based on stochastic variables.
 
-A range of features are implemented to support these functionalities.
-
-## Features
+## Current Features
 
+> [!TIP]
+> TODO: Update this section to reflect the current feature set.
+
+| Feature | Category | Description | Status |
+| ---------------------- | ----------- | ------------------------------------------------------------------------------------ | ----------- |
+| Scenario Management | Core | Manage multiple mining scenarios with independent parameter sets and outputs. | Done |
+| Parameter Definition | Core | Define and manage various parameters for each scenario. | Done |
+| Cost Tracking | Financial | Capture and analyze capital and operational expenditures. | Done |
+| Consumption Tracking | Operational | Record resource consumption tied to scenarios. | Done |
+| Production Output | Operational | Store and analyze production metrics such as tonnage, recovery, and revenue drivers. | Done |
+| Equipment Management | Operational | Manage equipment inventories and specifications for each scenario. | Done |
+| Maintenance Logging | Operational | Log maintenance events and costs associated with equipment. | Started |
+| Reporting Dashboard | Analytics | View aggregated statistics and visualizations for scenario outputs. | In Progress |
+| Monte Carlo Simulation | Analytics | Run stochastic simulations to assess risk and variability in outcomes. | Started |
+| Application Settings | Core | Manage global application settings such as themes and currency options. | Done |
+
+## Key UI/UX Features
+
-- **Scenario Management**: Manage multiple mining scenarios with independent parameter sets and outputs.
-- **Process Parameters**: Define and persist process inputs via FastAPI endpoints and template-driven forms.
-- **Cost Tracking**: Capture capital (`capex`) and operational (`opex`) expenditures per scenario.
-- **Consumption Tracking**: Record resource consumption (chemicals, fuel, water, scrap) tied to scenarios.
-- **Production Output**: Store production metrics such as tonnage, recovery, and revenue drivers.
-- **Equipment Management**: Register scenario-specific equipment inventories.
-- **Maintenance Logging**: Log maintenance events against equipment with dates and costs.
-- **Reporting Dashboard**: Surface aggregated statistics for simulation outputs with an interactive Chart.js dashboard.
 - **Unified UI Shell**: Server-rendered templates extend a shared base layout with a persistent left sidebar linking scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting views.
-- **Operations Overview Dashboard**: The root route (`/`) surfaces cross-scenario KPIs, charts, and maintenance reminders with a one-click refresh backed by aggregated loaders.
-- **Theming Tokens**: Shared CSS variables in `static/css/main.css` centralize the UI color palette for consistent styling and rapid theme tweaks.
-- **Settings Center**: The Settings landing page exposes visual theme controls and links to currency administration, backed by persisted application settings and environment overrides.
 - **Modular Frontend Scripts**: Page-specific interactions in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
-- **Monte Carlo Simulation (in progress)**: Services and routes are scaffolded for future stochastic analysis.
+
+## Planned Features
+
+See [Roadmap](docs/roadmap.md) for details on planned features and enhancements.
 
 ## Documentation & quickstart
@@ -45,47 +53,52 @@ The repository ships with a multi-stage `Dockerfile` that produces a slim runtim
 
 ### Build container
 
-```powershell
-# Build the image locally
-docker build -t calminer:latest .
+```bash
+docker build -t calminer .
 ```
 
 ### Push to registry
 
-```powershell
-# Tag and push the image to your registry
-docker login your-registry.com -u your-username -p your-password
-docker tag calminer:latest your-registry.com/your-namespace/calminer:latest
-docker push your-registry.com/your-namespace/calminer:latest
+To push the image to a registry, tag it appropriately and push:
+
+```bash
+docker tag calminer your-registry/calminer:latest
+docker push your-registry/calminer:latest
 ```
 
 ### Run container
 
-Expose FastAPI on <http://localhost:8000> with database configuration via granular environment variables:
+To run the container, ensure PostgreSQL is available and set environment variables:
 
-```powershell
-# Provide database configuration via granular environment variables
-docker run --rm -p 8000:8000 ^
-    -e DATABASE_DRIVER="postgresql" ^
-    -e DATABASE_HOST="db.host" ^
-    -e DATABASE_PORT="5432" ^
-    -e DATABASE_USER="calminer" ^
-    -e DATABASE_PASSWORD="s3cret" ^
-    -e DATABASE_NAME="calminer" ^
-    -e DATABASE_SCHEMA="public" ^
-    calminer:latest
+```bash
+docker run -p 8000:8000 \
+  -e DATABASE_HOST=your-postgres-host \
+  -e DATABASE_PORT=5432 \
+  -e DATABASE_USER=calminer \
+  -e DATABASE_PASSWORD=your-password \
+  -e DATABASE_NAME=calminer_db \
+  calminer
 ```
 
-### Orchestrated Deployment
+## Development with Docker Compose
 
-Use `docker compose` or an orchestrator of your choice to co-locate PostgreSQL/Redis alongside the app when needed. The image expects migrations to be applied before startup.
+For local development, use `docker-compose.yml` which includes the app and PostgreSQL services.
 
-## CI/CD expectations
+```bash
+# Start services
+docker-compose up
+
+# Or run in background
+docker-compose up -d
+
+# Stop services
+docker-compose down
+```
+
+The app will be available at `http://localhost:8000`, PostgreSQL at `localhost:5432`.
+
+## CI/CD
 
 CalMiner uses Gitea Actions workflows stored in `.gitea/workflows/`:
 
-- `test.yml` runs style/unit/e2e suites on every push with cached Python dependencies.
-- `build-and-push.yml` builds the Docker image, reuses cached layers, and pushes to the configured registry.
-- `deploy.yml` pulls the pushed image on the target host and restarts the container.
-
-Pipelines assume the following secrets are provisioned in the Gitea instance: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, and `SSH_PRIVATE_KEY`.
+- `ci.yml`: Runs on push and PR to main/develop branches. Sets up Python, installs dependencies, runs tests with coverage, and builds the Docker image.
````
`backups/.gitkeep` (new file, empty)

`config/setup_production.env.example` (new file, 35 lines):

```ini
# Copy this file to config/setup_production.env and replace values with production secrets

# Container image and runtime configuration
CALMINER_IMAGE=registry.example.com/calminer/api:latest
CALMINER_DOMAIN=calminer.example.com
TRAEFIK_ACME_EMAIL=ops@example.com
CALMINER_API_PORT=8000
UVICORN_WORKERS=4
UVICORN_LOG_LEVEL=info
CALMINER_NETWORK=calminer_backend
API_LIMIT_CPUS=1.0
API_LIMIT_MEMORY=1g
API_RESERVATION_MEMORY=512m
TRAEFIK_LIMIT_CPUS=0.5
TRAEFIK_LIMIT_MEMORY=512m
POSTGRES_LIMIT_CPUS=1.0
POSTGRES_LIMIT_MEMORY=2g
POSTGRES_RESERVATION_MEMORY=1g

# Application database connection
DATABASE_DRIVER=postgresql+psycopg2
DATABASE_HOST=production-db.internal
DATABASE_PORT=5432
DATABASE_NAME=calminer
DATABASE_USER=calminer_app
DATABASE_PASSWORD=ChangeMe123!
DATABASE_SCHEMA=public

# Optional consolidated SQLAlchemy URL (overrides granular settings when set)
# DATABASE_URL=postgresql+psycopg2://calminer_app:ChangeMe123!@production-db.internal:5432/calminer

# Superuser credentials used by scripts/setup_database.py for migrations/seed data
DATABASE_SUPERUSER=postgres
DATABASE_SUPERUSER_PASSWORD=ChangeMeSuper123!
DATABASE_SUPERUSER_DB=postgres
```
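One hedged way to consume this template is to copy it and auto-export everything into the current shell before running the setup script (POSIX `set -a` exports each sourced assignment; replace the placeholder secrets first):

```bash
cp config/setup_production.env.example config/setup_production.env
# Edit config/setup_production.env, then:
set -a
. ./config/setup_production.env
set +a
# Dry-run the setup script against the loaded settings before a live run.
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema \
    --initialize-schema --run-migrations --seed-data --dry-run -v
```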
Deleted Compose file (23 lines):

```yaml
version: "3.9"

services:
  postgres:
    image: postgres:16-alpine
    container_name: calminer_postgres_local
    restart: unless-stopped
    environment:
      POSTGRES_DB: calminer_local
      POSTGRES_USER: calminer
      POSTGRES_PASSWORD: secret
    ports:
      - "5433:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U calminer -d calminer_local"]
      interval: 10s
      timeout: 5s
      retries: 10
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  postgres_data:
```
`docker-compose.yml` (new file, 36 lines):

```yaml
version: "3.8"

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "8003:8003"
    environment:
      - DATABASE_HOST=postgres
      - DATABASE_PORT=5432
      - DATABASE_USER=calminer
      - DATABASE_PASSWORD=calminer_password
      - DATABASE_NAME=calminer_db
      - DATABASE_DRIVER=postgresql
    depends_on:
      - postgres
    volumes:
      - ./logs:/app/logs
    restart: unless-stopped

  postgres:
    image: postgres:17
    environment:
      - POSTGRES_USER=calminer
      - POSTGRES_PASSWORD=calminer_password
      - POSTGRES_DB=calminer_db
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  postgres_data:
```
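Bringing the stack up follows the usual Compose flow; per the port mappings above, the app lands on 8003 and Postgres on 5432:

```bash
docker-compose up -d --build   # app on localhost:8003, Postgres on localhost:5432
docker-compose logs -f app     # tail application logs
docker-compose down            # stop and remove the stack
```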
Modified architecture document (02 — Architecture Constraints):

```diff
@@ -1,66 +1,18 @@
 ---
-title: "02 — Architecture Constraints"
-description: "Document imposed constraints: technical, organizational, regulatory, and environmental constraints that affect architecture decisions."
-status: skeleton
+title: '02 — Architecture Constraints'
+description: 'Document imposed constraints: technical, organizational, regulatory, and environmental constraints that affect architecture decisions.'
+status: draft
 ---
 
 # 02 — Architecture Constraints
 
-## Technical Constraints
+## Constraints Overview
 
-> e.g., choice of FastAPI, PostgreSQL, SQLAlchemy, Chart.js, Jinja2 templates.
-
-The architecture of CalMiner is influenced by several technical constraints that shape its design and implementation:
-
-1. **Framework Selection**: The choice of FastAPI as the web framework imposes constraints on how the application handles requests, routing, and middleware. FastAPI's asynchronous capabilities must be leveraged appropriately to ensure optimal performance.
-2. **Database Technology**: The use of PostgreSQL as the primary database system dictates the data modeling, querying capabilities, and transaction management strategies. SQLAlchemy ORM is used for database interactions, which requires adherence to its conventions and limitations.
-3. **Frontend Technologies**: The decision to use Jinja2 for server-side templating and Chart.js for data visualization influences the structure of the frontend code and the way dynamic content is rendered.
-4. **Simulation Logic**: The Monte Carlo simulation logic must be designed to efficiently handle large datasets and perform computations within the constraints of the chosen programming language (Python) and its libraries.
-
-## Organizational Constraints
-
-> e.g., team skillsets, development workflows, CI/CD pipelines.
-
-Restrictions arising from organizational factors include:
-
-1. **Team Expertise**: The development team’s familiarity with FastAPI, SQLAlchemy, and frontend technologies like Jinja2 and Chart.js influences the architecture choices to ensure maintainability and ease of development.
-2. **Development Processes**: The adoption of Agile methodologies and CI/CD pipelines (using Gitea Actions) shapes the architecture to support continuous integration, automated testing, and deployment practices.
-3. **Collaboration Tools**: The use of specific collaboration and version control tools (e.g., Gitea) affects how code is managed, reviewed, and integrated, impacting the overall architecture and development workflow.
-4. **Documentation Standards**: The requirement for comprehensive documentation (as seen in the `docs/` folder) necessitates an architecture that is well-structured and easy to understand for both current and future team members.
-5. **Knowledge Sharing**: The need for effective knowledge sharing and onboarding processes influences the architecture to ensure that it is accessible and understandable for new team members.
-6. **Resource Availability**: The availability of hardware, software, and human resources within the organization can impose constraints on the architecture, affecting decisions related to scalability, performance, and feature implementation.
-
-## Regulatory Constraints
-
-> e.g., data privacy laws, industry standards.
-
-Regulatory constraints that impact the architecture of CalMiner include:
-
-1. **Data Privacy Compliance**: The architecture must ensure compliance with data privacy regulations such as GDPR or CCPA, which may dictate how user data is collected, stored, and processed.
-2. **Industry Standards**: Adherence to industry-specific standards and best practices may influence the design of data models, security measures, and reporting functionalities.
-3. **Auditability**: The system may need to incorporate logging and auditing features to meet regulatory requirements, affecting the architecture of data storage and access controls.
-4. **Data Retention Policies**: Regulatory requirements regarding data retention and deletion may impose constraints on how long certain types of data can be stored, influencing database design and data lifecycle management.
-5. **Security Standards**: Compliance with security standards (e.g., ISO/IEC 27001) may necessitate the implementation of specific security measures, such as encryption, access controls, and vulnerability management, which impact the overall architecture.
-
-## Environmental Constraints
-
-> e.g., deployment environments, cloud provider limitations.
-
-Environmental constraints affecting the architecture include:
-
-1. **Deployment Environments**: The architecture must accommodate various deployment environments (development, testing, production) with differing configurations and resource allocations.
-2. **Cloud Provider Limitations**: If deployed on a specific cloud provider, the architecture may need to align with the provider's services, limitations, and best practices, such as using managed databases or specific container orchestration tools.
-3. **Containerization**: The use of Docker for containerization imposes constraints on how the application is packaged, deployed, and scaled, influencing the architecture to ensure compatibility with container orchestration platforms.
-4. **Scalability Requirements**: The architecture must be designed to scale efficiently based on anticipated load and usage patterns, considering the limitations of the chosen infrastructure.
-
-## Performance Constraints
-
-> e.g., response time requirements, scalability needs.
-
-Current performance constraints include:
-
-1. **Response Time Requirements**: The architecture must ensure that the system can respond to user requests within a specified time frame, which may impact design decisions related to caching, database queries, and API performance.
-2. **Scalability Needs**: The system should be able to handle increased load and user traffic without significant degradation in performance, necessitating a scalable architecture that can grow with demand.
+- [Technical Constraints](02_constraints/02_01_technical_constraints.md)
+- [Organizational Constraints](02_constraints/02_02_organizational_constraints.md)
+- [Regulatory Constraints](02_constraints/02_03_regulatory_constraints.md)
+- [Environmental Constraints](02_constraints/02_04_environmental_constraints.md)
+- [Performance Constraints](02_constraints/02_05_performance_constraints.md)
 
 ## Security Constraints
```
New file: `02_constraints/02_01_technical_constraints.md` (16 lines):

```markdown
---
title: '02 — Technical Constraints'
description: 'Technical constraints that affect architecture decisions.'
status: draft
---

# Technical Constraints

> e.g., choice of FastAPI, PostgreSQL, SQLAlchemy, Chart.js, Jinja2 templates.

The architecture of CalMiner is influenced by several technical constraints that shape its design and implementation:

1. **Framework Selection**: The choice of FastAPI as the web framework imposes constraints on how the application handles requests, routing, and middleware. FastAPI's asynchronous capabilities must be leveraged appropriately to ensure optimal performance.
2. **Database Technology**: The use of PostgreSQL as the primary database system dictates the data modeling, querying capabilities, and transaction management strategies. SQLAlchemy ORM is used for database interactions, which requires adherence to its conventions and limitations.
3. **Frontend Technologies**: The decision to use Jinja2 for server-side templating and Chart.js for data visualization influences the structure of the frontend code and the way dynamic content is rendered.
4. **Simulation Logic**: The Monte Carlo simulation logic must be designed to efficiently handle large datasets and perform computations within the constraints of the chosen programming language (Python) and its libraries.
```
New file: `02_constraints/02_02_organizational_constraints.md` (18 lines):

```markdown
---
title: '02 — Organizational Constraints'
description: 'Organizational constraints that affect architecture decisions.'
status: draft
---

# Organizational Constraints

> e.g., team skillsets, development workflows, CI/CD pipelines.

Restrictions arising from organizational factors include:

1. **Team Expertise**: The development team’s familiarity with FastAPI, SQLAlchemy, and frontend technologies like Jinja2 and Chart.js influences the architecture choices to ensure maintainability and ease of development.
2. **Development Processes**: The adoption of Agile methodologies and CI/CD pipelines (using Gitea Actions) shapes the architecture to support continuous integration, automated testing, and deployment practices.
3. **Collaboration Tools**: The use of specific collaboration and version control tools (e.g., Gitea) affects how code is managed, reviewed, and integrated, impacting the overall architecture and development workflow.
4. **Documentation Standards**: The requirement for comprehensive documentation (as seen in the `docs/` folder) necessitates an architecture that is well-structured and easy to understand for both current and future team members.
5. **Knowledge Sharing**: The need for effective knowledge sharing and onboarding processes influences the architecture to ensure that it is accessible and understandable for new team members.
6. **Resource Availability**: The availability of hardware, software, and human resources within the organization can impose constraints on the architecture, affecting decisions related to scalability, performance, and feature implementation.
```
New file: `02_constraints/02_03_regulatory_constraints.md` (17 lines):

```markdown
---
title: '02 — Regulatory Constraints'
description: 'Regulatory constraints that affect architecture decisions.'
status: draft
---

# Regulatory Constraints

> e.g., data privacy laws, industry standards.

Regulatory constraints that impact the architecture of CalMiner include:

1. **Data Privacy Compliance**: The architecture must ensure compliance with data privacy regulations such as GDPR or CCPA, which may dictate how user data is collected, stored, and processed.
2. **Industry Standards**: Adherence to industry-specific standards and best practices may influence the design of data models, security measures, and reporting functionalities.
3. **Auditability**: The system may need to incorporate logging and auditing features to meet regulatory requirements, affecting the architecture of data storage and access controls.
4. **Data Retention Policies**: Regulatory requirements regarding data retention and deletion may impose constraints on how long certain types of data can be stored, influencing database design and data lifecycle management.
5. **Security Standards**: Compliance with security standards (e.g., ISO/IEC 27001) may necessitate the implementation of specific security measures, such as encryption, access controls, and vulnerability management, which impact the overall architecture.
```
New file: `02_constraints/02_04_environmental_constraints.md` (16 lines):

```markdown
---
title: '02 — Environmental Constraints'
description: 'Environmental constraints that affect architecture decisions.'
status: draft
---

# Environmental Constraints

> e.g., deployment environments, cloud provider limitations.

Environmental constraints affecting the architecture include:

1. **Deployment Environments**: The architecture must accommodate various deployment environments (development, testing, production) with differing configurations and resource allocations.
2. **Cloud Provider Limitations**: If deployed on a specific cloud provider, the architecture may need to align with the provider's services, limitations, and best practices, such as using managed databases or specific container orchestration tools.
3. **Containerization**: The use of Docker for containerization imposes constraints on how the application is packaged, deployed, and scaled, influencing the architecture to ensure compatibility with container orchestration platforms.
4. **Scalability Requirements**: The architecture must be designed to scale efficiently based on anticipated load and usage patterns, considering the limitations of the chosen infrastructure.
```
New file: `02_constraints/02_05_performance_constraints.md` (14 lines):

```markdown
---
title: '02 — Performance Constraints'
description: 'Performance constraints that affect architecture decisions.'
status: draft
---

# Performance Constraints

> e.g., response time requirements, scalability needs.

Current performance constraints include:

1. **Response Time Requirements**: The architecture must ensure that the system can respond to user requests within a specified time frame, which may impact design decisions related to caching, database queries, and API performance.
2. **Scalability Needs**: The system should be able to handle increased load and user traffic without significant degradation in performance, necessitating a scalable architecture that can grow with demand.
```
Modified architecture document (03 — System Scope and Context):

```diff
@@ -18,24 +18,7 @@ The CalMiner system operates within the context of mining project management, pr
 
 ## Scope of the Architecture
 
-The architecture encompasses the following key areas:
-
-1. **Data Ingestion**: Mechanisms for collecting and processing data from various sources.
-2. **Data Storage**: Solutions for storing and managing historical and real-time data.
-3. **Simulation Engine**: Core algorithms and models for scenario analysis.
-3.1. **Modeling Framework**: Tools for defining and managing simulation models.
-3.2. **Parameter Management**: Systems for handling input parameters and configurations.
-3.3. **Execution Engine**: Infrastructure for running simulations and processing results.
-3.4. **Result Storage**: Systems for storing simulation outputs for analysis and reporting.
-4. **Financial Reporting**: Tools for generating reports and visualizations based on simulation outcomes.
-5. **Risk Assessment**: Frameworks for identifying and evaluating potential project risks.
-6. **Profitability Analysis**: Modules for calculating and analyzing project profitability metrics.
-7. **User Interface**: Design and implementation of the user-facing components of the system.
-8. **Security and Compliance**: Measures to ensure data security and regulatory compliance.
-9. **Scalability and Performance**: Strategies for ensuring the system can handle increasing data volumes and user loads.
-10. **Integration Points**: Interfaces for integrating with external systems and services.
-11. **Monitoring and Logging**: Systems for tracking system performance and user activity.
-12. **Maintenance and Support**: Processes for ongoing system maintenance and user support.
+See [Architecture Scope](03_scope/03_01_architecture_scope.md) for details.
 
 ## Diagram
```
New file: `docs/architecture/03_scope/03_01_architecture_scope.md` (26 lines):

```markdown
---
title: '03 — Architecture Scope'
description: 'Key areas encompassed by the architecture.'
status: draft
---

# Architecture Scope

The architecture encompasses the following key areas:

1. **Data Ingestion**: Mechanisms for collecting and processing data from various sources.
2. **Data Storage**: Solutions for storing and managing historical and real-time data.
3. **Simulation Engine**: Core algorithms and models for scenario analysis.
3.1. **Modeling Framework**: Tools for defining and managing simulation models.
3.2. **Parameter Management**: Systems for handling input parameters and configurations.
3.3. **Execution Engine**: Infrastructure for running simulations and processing results.
3.4. **Result Storage**: Systems for storing simulation outputs for analysis and reporting.
4. **Financial Reporting**: Tools for generating reports and visualizations based on simulation outcomes.
5. **Risk Assessment**: Frameworks for identifying and evaluating potential project risks.
6. **Profitability Analysis**: Modules for calculating and analyzing project profitability metrics.
7. **User Interface**: Design and implementation of the user-facing components of the system.
8. **Security and Compliance**: Measures to ensure data security and regulatory compliance.
9. **Scalability and Performance**: Strategies for ensuring the system can handle increasing data volumes and user loads.
10. **Integration Points**: Interfaces for integrating with external systems and services.
11. **Monitoring and Logging**: Systems for tracking system performance and user activity.
12. **Maintenance and Support**: Processes for ongoing system maintenance and user support.
```
Modified architecture document (04 — Solution Strategy):

```diff
@@ -8,42 +8,9 @@ status: draft
 
 This section outlines the high-level solution strategy for implementing the CalMiner system, focusing on major approaches, technology choices, and trade-offs.
 
-## Client-Server Architecture
+## Solution Strategy Overview
 
-- **Backend**: FastAPI serves as the backend framework, providing RESTful APIs for data management, simulation execution, and reporting. It leverages SQLAlchemy for ORM-based database interactions with PostgreSQL.
-- **Frontend**: Server-rendered Jinja2 templates deliver dynamic HTML views, enhanced with Chart.js for interactive data visualizations. This approach balances performance and simplicity, avoiding the complexity of a full SPA.
-- **Middleware**: Custom middleware handles JSON validation to ensure data integrity before processing requests.
-
-## Technology Choices
-
-- **FastAPI**: Chosen for its high performance, ease of use, and modern features like async support and automatic OpenAPI documentation.
-- **PostgreSQL**: Selected for its robustness, scalability, and support for complex queries, making it suitable for handling the diverse data needs of mining project management.
-- **SQLAlchemy**: Provides a flexible and powerful ORM layer, facilitating database interactions while maintaining code readability and maintainability.
-- **Chart.js**: Utilized for its simplicity and effectiveness in rendering interactive charts, enhancing the user experience on the dashboard.
-- **Jinja2**: Enables server-side rendering of HTML templates, allowing for dynamic content generation while keeping the frontend lightweight.
-- **Pydantic**: Used for data validation and serialization, ensuring that incoming request payloads conform to expected schemas.
-- **Docker**: Employed for containerization, ensuring consistent deployment across different environments and simplifying dependency management.
-- **Redis**: Used as an in-memory data store to cache frequently accessed data, improving application performance and reducing database load.
-
-## Trade-offs
-
-- **Server-Rendered vs. SPA**: Opted for server-rendered templates over a single-page application (SPA) to reduce complexity and improve initial load times, at the cost of some interactivity.
-- **Synchronous vs. Asynchronous**: While FastAPI supports async operations, the initial implementation focuses on synchronous request handling for simplicity, with plans to introduce async features as needed.
-- **Monolithic vs. Microservices**: The initial architecture follows a monolithic approach for ease of development and deployment, with the possibility of refactoring into microservices as the system scales.
-- **In-Memory Caching**: Implementing Redis for caching introduces additional infrastructure complexity but significantly enhances performance for read-heavy operations.
-- **Database Choice**: PostgreSQL was chosen over NoSQL alternatives due to the structured nature of the data and the need for complex querying capabilities, despite potential scalability challenges.
-- **Technology Familiarity**: Selected technologies align with the team's existing skill set to minimize the learning curve and accelerate development, even if some alternatives may offer marginally better performance or features.
-- **Extensibility vs. Simplicity**: The architecture is designed to be extensible for future features (e.g., Monte Carlo simulation engine) while maintaining simplicity in the initial implementation to ensure timely delivery of core functionalities.
-
-## Future Considerations
-
-- **Scalability**: As the user base grows, consider transitioning to a microservices architecture and implementing load balancing strategies.
-- **Asynchronous Processing**: Introduce asynchronous task queues (e.g., Celery) for long-running simulations to improve responsiveness.
-- **Enhanced Frontend**: Explore the possibility of integrating a frontend framework (e.g., React or Vue.js) for more dynamic user interactions in future iterations.
-- **Advanced Analytics**: Plan for integrating advanced analytics and machine learning capabilities to enhance simulation accuracy and reporting insights.
-- **Security Enhancements**: Implement robust authentication and authorization mechanisms to protect sensitive data and ensure compliance with industry standards.
-- **Continuous Integration/Continuous Deployment (CI/CD)**: Establish CI/CD pipelines to automate testing, building, and deployment processes for faster and more reliable releases.
-- **Monitoring and Logging**: Integrate monitoring tools (e.g., Prometheus, Grafana) and centralized logging solutions (e.g., ELK stack) to track application performance and troubleshoot issues effectively.
-- **User Feedback Loop**: Implement mechanisms for collecting user feedback to inform future development priorities and improve user experience.
-- **Documentation**: Maintain comprehensive documentation for both developers and end-users to facilitate onboarding and effective use of the system.
-- **Testing Strategy**: Develop a robust testing strategy, including unit, integration, and end-to-end tests, to ensure code quality and reliability as the system evolves.
+- [Client-Server Architecture](04_strategy/04_01_client_server_architecture.md)
+- [Technology Choices](04_strategy/04_02_technology_choices.md)
+- [Trade-offs](04_strategy/04_03_trade_offs.md)
+- [Future Considerations](04_strategy/04_04_future_considerations.md)
```
@@ -1,110 +0,0 @@

# Implementation Plan 2025-10-20

This file contains the implementation plan (MVP features, steps, and estimates).

## Project Setup

1. Connect to the PostgreSQL database with schema `calminer`.
1. Create and activate a virtual environment and install dependencies via `requirements.txt`.
1. Define database environment variables in `.env` (e.g., `DATABASE_DRIVER`, `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME`, optional `DATABASE_SCHEMA`).
1. Configure the FastAPI entrypoint in `main.py` to include routers.

## Feature: Scenario Management

### Scenario Management — Steps

1. Create `models/scenario.py` for scenario CRUD.
1. Implement API endpoints in `routes/scenarios.py` (GET, POST, PUT, DELETE).
1. Write unit tests in `tests/unit/test_scenario.py`.
1. Build UI component `components/ScenarioForm.html`.

## Feature: Process Parameters

### Parameters — Steps

1. Create `models/parameters.py` for process parameters.
1. Implement Pydantic schemas in `routes/parameters.py`.
1. Add validation middleware in `middleware/validation.py`.
1. Write unit tests in `tests/unit/test_parameter.py`.
1. Build UI component `components/ParameterInput.html`.

## Feature: Stochastic Variables

### Stochastic Variables — Steps

1. Create `models/distribution.py` for variable distributions.
1. Implement API routes in `routes/distributions.py`.
1. Write Pydantic schemas and validations.
1. Write unit tests in `tests/unit/test_distribution.py`.
1. Build UI component `components/DistributionEditor.html`.

## Feature: Cost Tracking

### Cost Tracking — Steps

1. Create `models/capex.py` and `models/opex.py`.
1. Implement API routes in `routes/costs.py`.
1. Write Pydantic schemas for CAPEX/OPEX.
1. Write unit tests in `tests/unit/test_costs.py`.
1. Build UI component `components/CostForm.html`.

## Feature: Consumption Tracking

### Consumption Tracking — Steps

1. Create models for consumption: `chemical_consumption.py`, `fuel_consumption.py`, `water_consumption.py`, `scrap_consumption.py`.
1. Implement API routes in `routes/consumption.py`.
1. Write Pydantic schemas for consumption data.
1. Write unit tests in `tests/unit/test_consumption.py`.
1. Build UI component `components/ConsumptionDashboard.html`.

## Feature: Production Output

### Production Output — Steps

1. Create `models/production_output.py`.
1. Implement API routes in `routes/production.py`.
1. Write Pydantic schemas for production output.
1. Write unit tests in `tests/unit/test_production.py`.
1. Build UI component `components/ProductionChart.html`.

## Feature: Equipment Management

### Equipment Management — Steps

1. Create `models/equipment.py` for equipment data.
1. Implement API routes in `routes/equipment.py`.
1. Write Pydantic schemas for equipment.
1. Write unit tests in `tests/unit/test_equipment.py`.
1. Build UI component `components/EquipmentList.html`.

## Feature: Maintenance Logging

### Maintenance Logging — Steps

1. Create `models/maintenance.py` for maintenance events.
1. Implement API routes in `routes/maintenance.py`.
1. Write Pydantic schemas for maintenance logs.
1. Write unit tests in `tests/unit/test_maintenance.py`.
1. Build UI component `components/MaintenanceLog.html`.

## Feature: Monte Carlo Simulation Engine

### Monte Carlo Engine — Steps

1. Implement Monte Carlo logic in `services/simulation.py` (a sketch follows this list).
1. Persist results in `models/simulation_result.py`.
1. Expose an endpoint in `routes/simulations.py`.
1. Write integration tests in `tests/unit/test_simulation.py`.
1. Build UI component `components/SimulationRunner.html`.
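
The Monte Carlo step above is the most algorithmic piece of the plan. The following minimal sketch illustrates the intended shape of `services/simulation.py`; everything here (triangular sampling, the `StochasticVariable` container, the `run_simulation` name) is an illustrative assumption, not the project's actual API.

```python
# Hypothetical sketch of services/simulation.py -- names and distribution
# choice are assumptions for illustration only.
import random
import statistics
from dataclasses import dataclass


@dataclass
class StochasticVariable:
    """One input sampled per iteration (triangular distribution assumed)."""

    name: str
    low: float
    mode: float
    high: float

    def sample(self) -> float:
        # random.triangular takes (low, high, mode)
        return random.triangular(self.low, self.high, self.mode)


def run_simulation(variables: list[StochasticVariable], iterations: int = 10_000) -> dict:
    """Draw every variable `iterations` times and summarise the totals."""
    totals = [sum(v.sample() for v in variables) for _ in range(iterations)]
    return {
        "iterations": iterations,
        "mean": statistics.fmean(totals),
        "median": statistics.median(totals),
        "stdev": statistics.stdev(totals),
    }
```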

## Feature: Reporting / Dashboard

### Reporting / Dashboard — Steps

1. Implement report calculations in `services/reporting.py`.
1. Add detailed and summary endpoints in `routes/reporting.py`.
1. Write unit tests in `tests/unit/test_reporting.py`.
1. Enhance UI in `components/Dashboard.html` with charts.

See [UI and Style](../13_ui_and_style.md) for the UI template audit, layout guidance, and next steps.

@@ -0,0 +1,10 @@

---
title: '04.01 — Client-Server Architecture'
description: 'Details on the client-server architecture of CalMiner.'
---

# 04.01 — Client-Server Architecture

- **Backend**: FastAPI serves as the backend framework, providing RESTful APIs for data management, simulation execution, and reporting. It leverages SQLAlchemy for ORM-based database interactions with PostgreSQL.
- **Frontend**: Server-rendered Jinja2 templates deliver dynamic HTML views, enhanced with Chart.js for interactive data visualizations. This approach balances performance and simplicity, avoiding the complexity of a full SPA.
- **Middleware**: Custom middleware handles JSON validation to ensure data integrity before processing requests.

docs/architecture/04_strategy/04_02_technology_choices.md
@@ -0,0 +1,15 @@

---
title: '04.02 — Technology Choices'
description: 'Detailed explanation of technology choices in CalMiner.'
---

# 04.02 — Technology Choices

- **FastAPI**: Chosen for its high performance, ease of use, and modern features like async support and automatic OpenAPI documentation.
- **PostgreSQL**: Selected for its robustness, scalability, and support for complex queries, making it suitable for handling the diverse data needs of mining project management.
- **SQLAlchemy**: Provides a flexible and powerful ORM layer, facilitating database interactions while maintaining code readability and maintainability.
- **Chart.js**: Utilized for its simplicity and effectiveness in rendering interactive charts, enhancing the user experience on the dashboard.
- **Jinja2**: Enables server-side rendering of HTML templates, allowing for dynamic content generation while keeping the frontend lightweight.
- **Pydantic**: Used for data validation and serialization, ensuring that incoming request payloads conform to expected schemas (see the sketch after this list).
- **Docker**: Employed for containerization, ensuring consistent deployment across different environments and simplifying dependency management.
- **Redis**: Used as an in-memory data store to cache frequently accessed data, improving application performance and reducing database load.
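
As a hedged illustration of how the FastAPI/Pydantic pairing plays out, the snippet below shows a request schema validated automatically at the route boundary; the `ScenarioCreate` schema and endpoint are invented for this example and are not CalMiner's actual definitions.

```python
# Illustrative only -- schema and route names are assumptions.
from fastapi import FastAPI
from pydantic import BaseModel, Field

app = FastAPI()


class ScenarioCreate(BaseModel):
    """Pydantic validates the payload and feeds the OpenAPI docs for free."""

    name: str = Field(..., min_length=1, max_length=120)
    discount_rate: float = Field(0.08, ge=0.0, le=1.0)


@app.post("/scenarios")
def create_scenario(payload: ScenarioCreate) -> dict:
    # By the time this body runs, FastAPI has already rejected invalid
    # input with a 422 response.
    return payload.model_dump()
```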
docs/architecture/04_strategy/04_03_trade_offs.md
@@ -0,0 +1,14 @@

---
title: '04.03 — Trade-offs'
description: 'Discussion of trade-offs made in the CalMiner architecture.'
---

# 04.03 — Trade-offs

- **Server-Rendered vs. SPA**: Opted for server-rendered templates over a single-page application (SPA) to reduce complexity and improve initial load times, at the cost of some interactivity.
- **Synchronous vs. Asynchronous**: While FastAPI supports async operations, the initial implementation focuses on synchronous request handling for simplicity, with plans to introduce async features as needed.
- **Monolithic vs. Microservices**: The initial architecture follows a monolithic approach for ease of development and deployment, with the possibility of refactoring into microservices as the system scales.
- **In-Memory Caching**: Implementing Redis for caching introduces additional infrastructure complexity but significantly enhances performance for read-heavy operations.
- **Database Choice**: PostgreSQL was chosen over NoSQL alternatives due to the structured nature of the data and the need for complex querying capabilities, despite potential scalability challenges.
- **Technology Familiarity**: Selected technologies align with the team's existing skill set to minimize the learning curve and accelerate development, even if some alternatives may offer marginally better performance or features.
- **Extensibility vs. Simplicity**: The architecture is designed to be extensible for future features (e.g., Monte Carlo simulation engine) while maintaining simplicity in the initial implementation to ensure timely delivery of core functionalities.

docs/architecture/04_strategy/04_04_future_considerations.md
@@ -0,0 +1,17 @@

---
title: '04.04 — Future Considerations'
description: 'Future considerations for the CalMiner architecture.'
---

# 04.04 — Future Considerations

- **Scalability**: As the user base grows, consider transitioning to a microservices architecture and implementing load balancing strategies.
- **Asynchronous Processing**: Introduce asynchronous task queues (e.g., Celery) for long-running simulations to improve responsiveness.
- **Enhanced Frontend**: Explore the possibility of integrating a frontend framework (e.g., React or Vue.js) for more dynamic user interactions in future iterations.
- **Advanced Analytics**: Plan for integrating advanced analytics and machine learning capabilities to enhance simulation accuracy and reporting insights.
- **Security Enhancements**: Implement robust authentication and authorization mechanisms to protect sensitive data and ensure compliance with industry standards.
- **Continuous Integration/Continuous Deployment (CI/CD)**: Establish CI/CD pipelines to automate testing, building, and deployment processes for faster and more reliable releases.
- **Monitoring and Logging**: Integrate monitoring tools (e.g., Prometheus, Grafana) and centralized logging solutions (e.g., ELK stack) to track application performance and troubleshoot issues effectively.
- **User Feedback Loop**: Implement mechanisms for collecting user feedback to inform future development priorities and improve user experience.
- **Documentation**: Maintain comprehensive documentation for both developers and end-users to facilitate onboarding and effective use of the system.
- **Testing Strategy**: Develop a robust testing strategy, including unit, integration, and end-to-end tests, to ensure code quality and reliability as the system evolves.

docs/architecture/05_blocks/05_01_architecture_overview.md
@@ -0,0 +1,13 @@

---
title: '05 — Architecture Overview'
description: "This overview complements architecture with a high-level map of CalMiner's module layout and request flow."
status: draft
---

This overview complements [architecture](README.md) with a high-level map of CalMiner's module layout and request flow.

Refer to the detailed architecture chapters in `docs/architecture/`:

- Module map & components: [Building Block View](../05_building_block_view.md)
- Request flow & runtime interactions: [Runtime View](../06_runtime_view.md)
- Simulation roadmap & strategy: [Solution Strategy](../04_solution_strategy.md)

docs/architecture/05_blocks/05_02_backend_components.md
@@ -0,0 +1,13 @@

---
title: '05 — Backend Components'
description: 'Description of the backend components of the CalMiner application.'
status: draft
---

- **FastAPI application** (`main.py`): entry point that configures routers, middleware, and startup/shutdown events.
- **Routers** (`routes/`): modular route handlers for scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting. Each router defines RESTful endpoints, request/response schemas, and orchestrates service calls.
  - Routers leverage a shared dependency module (`routes/dependencies.get_db`) for SQLAlchemy session management (a sketch follows this list).
- **Models** (`models/`): SQLAlchemy ORM models representing database tables and relationships, encapsulating domain entities like Scenario, CapEx, OpEx, Consumption, ProductionOutput, Equipment, Maintenance, and SimulationResult.
- **Services** (`services/`): business logic layer that processes data, performs calculations, and interacts with models. Key services include reporting calculations and Monte Carlo simulation scaffolding.
  - `services/settings.py`: manages application settings backed by the `application_setting` table, including CSS variable defaults, persistence, and environment-driven overrides that surface in both the API and UI.
- **Database** (`config/database.py`): sets up the SQLAlchemy engine and session management for PostgreSQL interactions.
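
A common shape for such a dependency, sketched under the assumption that `config/database.py` exposes a `SessionLocal` session factory (the actual module contents may differ):

```python
# Sketch of routes/dependencies.py -- assumes a SessionLocal factory exists.
from collections.abc import Generator

from sqlalchemy.orm import Session

from config.database import SessionLocal


def get_db() -> Generator[Session, None, None]:
    """Yield one SQLAlchemy session per request and always close it."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
```

Routers then declare `db: Session = Depends(get_db)` and FastAPI handles opening and closing the session around each request.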
docs/architecture/05_blocks/05_03_frontend_components.md
@@ -0,0 +1,11 @@

---
title: '05 — Frontend Components'
description: 'Description of the frontend components of the CalMiner application.'
status: draft
---

- **Templates** (`templates/`): Jinja2 templates for server-rendered HTML views, extending a shared base layout with a persistent sidebar for navigation.
- **Static Assets** (`static/`): CSS and JavaScript files for styling and interactivity. Shared CSS variables in `static/css/main.css` define the color palette, while page-specific JS modules in `static/js/` handle dynamic behaviors.
- **Reusable partials** (`templates/partials/components.html`): macro library that standardises select inputs, feedback/empty states, and table wrappers so pages remain consistent while keeping DOM hooks stable for existing JavaScript modules.
- `templates/settings.html`: Settings hub that renders theme controls and environment override tables using metadata provided by `routes/ui.py`.
- `static/js/settings.js`: applies client-side validation, form submission, and live CSS updates for theme changes, respecting environment-managed variables returned by the API.

@@ -0,0 +1,8 @@

---
title: '05 — Middleware & Utilities'
description: 'Description of the middleware and utility components of the CalMiner application.'
status: draft
---

- **Middleware** (`middleware/validation.py`): applies JSON validation before requests reach routers (see the sketch after this list).
- **Testing** (`tests/unit/`): pytest suite covering route and service behavior, including UI rendering checks and negative-path router validation tests to ensure consistent HTTP error semantics. Playwright end-to-end coverage is planned for core smoke flows (dashboard load, scenario inputs, reporting) and will attach in CI once scaffolding is completed.
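
The description above suggests a Starlette-style middleware; the sketch below is one plausible shape, not the contents of the real `middleware/validation.py`.

```python
# Illustrative JSON validation middleware -- not the project's implementation.
import json

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse


class JSONValidationMiddleware(BaseHTTPMiddleware):
    """Reject bodies that claim to be JSON but do not parse."""

    async def dispatch(self, request: Request, call_next):
        if request.headers.get("content-type", "").startswith("application/json"):
            body = await request.body()
            if body:
                try:
                    json.loads(body)
                except ValueError:
                    return JSONResponse(
                        {"detail": "Malformed JSON body"}, status_code=400
                    )
        return await call_next(request)
```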
@@ -1,6 +1,6 @@

---
title: "05 — Building Block View"
description: "Explain the static structure: modules, components, services and their relationships."
status: draft
---

@@ -8,194 +8,9 @@ status: draft

# 05 — Building Block View

## Building Block Overview

- [Architecture Overview](05_blocks/05_01_architecture_overview.md)
- [Backend Components](05_blocks/05_02_backend_components.md)
- [Frontend Components](05_blocks/05_03_frontend_components.md)
- [Middleware & Utilities](05_blocks/05_04_middleware_utilities.md)

## System Components

### Component Diagram

**System Architecture — Mermaid Diagram**

```mermaid
graph LR
    %% Direction
    %% LR = left-to-right for a wide architecture view

    %% === Clients ===
    U["User (Browser)"]

    %% === Frontend ===
    subgraph FE[Frontend]
        TPL["Jinja2 Templates\n(templates/)\n• base layout + sidebar"]
        PARTS["Reusable Partials\n(templates/partials/components.html)\n• inputs • empty states • table wrappers"]
        STATIC["Static Assets\n(static/)\n• CSS: static/css/main.css (palette via CSS vars)\n• JS: static/js/*.js (page modules)"]
        SETPAGE["Settings View\n(templates/settings.html)"]
        SETJS["Settings Logic\n(static/js/settings.js)\n• validation • submit • live CSS updates"]
    end

    %% === Backend ===
    subgraph BE[Backend FastAPI]
        MAIN["FastAPI App\n(main.py)\n• routers • middleware • startup/shutdown"]

        subgraph ROUTES[Routers]
            R_SCN["scenarios"]
            R_PAR["parameters"]
            R_CST["costs"]
            R_CONS["consumption"]
            R_PROD["production"]
            R_EQP["equipment"]
            R_MNT["maintenance"]
            R_SIM["simulations"]
            R_REP["reporting"]
            R_UI["ui.py (metadata for UI)"]
            DEP["dependencies.get_db\n(shared SQLAlchemy session)"]
        end

        subgraph SRV[Services]
            S_BLL["Business Logic Layer\n• orchestrates models + calc"]
            S_REP["Reporting Calculations"]
            S_SIM["Monte Carlo\n(simulation scaffolding)"]
            S_SET["Settings Manager\n(services/settings.py)\n• defaults via CSS vars\n• persistence in DB\n• env overrides\n• surfaces to API & UI"]
        end

        subgraph MOD[Models]
            M_SCN["Scenario"]
            M_CAP["CapEx"]
            M_OPEX["OpEx"]
            M_CONS["Consumption"]
            M_PROD["ProductionOutput"]
            M_EQP["Equipment"]
            M_MNT["Maintenance"]
            M_SIMR["SimulationResult"]
        end

        subgraph DB[Database Layer]
            CFG["config/database.py\n(SQLAlchemy engine & sessions)"]
            PG[("PostgreSQL")]
            APPSET["application_setting table"]
        end
    end

    %% === Middleware & Utilities ===
    subgraph MW[Middleware & Utilities]
        VAL["JSON Validation Middleware\n(middleware/validation.py)"]
    end

    subgraph TEST[Testing]
        UNIT["pytest unit tests\n(tests/unit/)\n• routes • services • UI rendering\n• negative-path validation"]
        E2E["Playwright E2E (planned)\n• dashboard • scenario inputs • reporting\n• attach in CI"]
    end

    %% ===================== Edges / Flows =====================
    %% User to Frontend/Backend
    U -->|HTTP GET| MAIN
    U --> TPL
    TPL -->|server-rendered HTML| U
    STATIC --> U
    PARTS --> TPL
    SETPAGE --> U
    SETJS --> U

    %% Frontend to Routers (AJAX/form submits)
    SETJS -->|fetch/POST| R_UI
    TPL -->|form submit / fetch| ROUTES

    %% FastAPI app wiring and middleware
    VAL --> MAIN
    MAIN --> ROUTES

    %% Routers to Services
    ROUTES -->|calls| SRV
    R_REP -->|calc| S_REP
    R_SIM -->|run| S_SIM
    R_UI -->|read/write settings meta| S_SET

    %% Services to Models & DB
    SRV --> MOD
    MOD --> CFG
    CFG --> PG

    %% Settings manager persistence path
    S_SET -->|persist/read| APPSET
    APPSET --- PG

    %% Shared DB session dependency
    DEP -. provides .-> ROUTES
    DEP -. session .-> SRV

    %% Model entities mapping
    S_BLL --> M_SCN & M_CAP & M_OPEX & M_CONS & M_PROD & M_EQP & M_MNT & M_SIMR

    %% Testing coverage
    UNIT --> ROUTES
    UNIT --> SRV
    UNIT --> TPL
    UNIT --> VAL
    E2E --> U
    E2E --> MAIN

    %% Legend
    classDef store fill:#fff,stroke:#555,stroke-width:1px;
    class PG store;
```

---

**Notes**

- Arrows represent primary data/command flow. Dashed arrows denote shared dependencies (injected SQLAlchemy session).
- The settings pipeline shows how environment overrides and DB-backed defaults propagate to both API and UI.

## Module Map (code)

- `scenario.py`: central scenario entity with relationships to cost, consumption, production, equipment, maintenance, and simulation results.
- `capex.py`, `opex.py`: financial expenditures tied to scenarios.
- `consumption.py`, `production_output.py`: operational data tables.
- `equipment.py`, `maintenance.py`: asset management models.
- `simulation_result.py`: stores Monte Carlo iteration outputs.
- `application_setting.py`: persists editable application configuration, currently focused on theme variables but designed to store future settings categories.

## Service Layer

- `reporting.py`: computes aggregates (count, min/max, mean, median, percentiles, standard deviation, variance, tail-risk metrics) from simulation results; a sketch of this kind of computation follows the list.
- `simulation.py`: scaffolds Monte Carlo simulation logic (currently in-memory; persistence planned).
- `currency.py`: handles currency normalization for cost tables.
- `utils.py`: shared helper functions (e.g., statistical calculations).
- `validation.py`: JSON schema validation middleware.
- `database.py`: SQLAlchemy engine and session setup.
- `dependencies.py`: FastAPI dependency injection for DB sessions.
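
For intuition, the aggregate set listed for `reporting.py` can be computed with the standard library alone; this sketch mirrors the documented metrics but not the actual code (the tail-risk definition shown, a CVaR-style mean of the worst 5%, is an assumption):

```python
# Illustrative aggregate computation -- mirrors the documented metric list,
# not the actual services/reporting.py implementation.
import statistics


def summarize(samples: list[float]) -> dict[str, float]:
    cuts = statistics.quantiles(samples, n=100)  # cuts[i] is the (i + 1)th percentile
    p95 = cuts[94]
    worst_tail = [s for s in samples if s >= p95]
    return {
        "count": len(samples),
        "min": min(samples),
        "max": max(samples),
        "mean": statistics.fmean(samples),
        "median": statistics.median(samples),
        "p5": cuts[4],
        "p95": p95,
        "stdev": statistics.stdev(samples),
        "variance": statistics.variance(samples),
        "cvar_95": statistics.fmean(worst_tail),  # mean of the worst 5% of outcomes
    }
```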
@@ -21,10 +21,7 @@ CalMiner uses a combination of unit, integration, and end-to-end tests to ensure

### CI/CD

- Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`.
- `ci.yml` runs on push and pull requests to `main` and `develop` branches. It provisions a temporary PostgreSQL 15 service, sets up Python 3.11, installs dependencies from `requirements.txt` and `requirements-test.txt`, runs pytest with coverage on all tests, and builds the Docker image.
- Run tests on pull requests to shared branches; enforce coverage target ≥80% (pytest-cov).

### Running Tests

@@ -74,7 +71,7 @@ To run the Playwright tests:

```bash
pytest tests/e2e/
```

To run headed mode:

@@ -166,11 +163,11 @@ When adding new workflows, mirror this structure to ensure secrets, caching, and

- Usage sketch (in `test.yml`):

```yaml
- name: Prepare Python environment
  uses: ./.gitea/actions/setup-python-env
  with:
    install-playwright: ${{ matrix.target == 'e2e' }}
    db-dry-run: true
```

- Benefits: centralizes proxy logic and dependency installs, reduces duplication across matrix jobs, and keeps future lint/type-check jobs lightweight by disabling database setup.

docs/architecture/07_deployment/07_02_database.md
@@ -0,0 +1,82 @@

# Database Deployment

## Migrations & Baseline

A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, provisions the `application_setting` store for configurable UI/system options, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.

Configure granular database settings in your PowerShell session before running migrations:

```powershell
$env:DATABASE_DRIVER = 'postgresql'
$env:DATABASE_HOST = 'localhost'
$env:DATABASE_PORT = '5432'
$env:DATABASE_USER = 'calminer'
$env:DATABASE_PASSWORD = 's3cret'
$env:DATABASE_NAME = 'calminer'
$env:DATABASE_SCHEMA = 'public'
python scripts/setup_database.py --run-migrations --seed-data --dry-run
python scripts/setup_database.py --run-migrations --seed-data
```

The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.

> ℹ️ When `--seed-data` is supplied without `--run-migrations`, the bootstrap script automatically applies any pending SQL migrations first so the `application_setting` table (and future settings-backed features) are present before seeding.
>
> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.

## Database bootstrap workflow

Provision or refresh a database instance with `scripts/setup_database.py`. Populate the required environment variables (an example lives at `config/setup_test.env.example`) and run:

```powershell
# Load test credentials (PowerShell)
Get-Content .\config\setup_test.env.example |
    ForEach-Object {
        if ($_ -and -not $_.StartsWith('#')) {
            $name, $value = $_ -split '=', 2
            Set-Item -Path Env:$name -Value $value
        }
    }

# Dry-run to inspect the planned actions
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v

# Execute the full workflow
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
```

Typical log output confirms:

- Admin and application connections succeed for the supplied credentials.
- Database and role creation are idempotent (`already present` when rerun).
- SQLAlchemy metadata either reports missing tables or `All tables already exist`.
- Migrations list pending files and finish with `Applied N migrations` (a new database reports `Applied 1 migrations` for `000_base.sql`).

After a successful run the target database contains all application tables plus `schema_migrations`, and that table records each applied migration file. New installations only record `000_base.sql`; upgraded environments retain historical entries alongside the baseline.

### Seeding reference data

`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:

```powershell
python scripts/seed_data.py --currencies --units --dry-run
python scripts/seed_data.py --currencies --units
```

The seeder upserts the canonical currency catalog (`USD`, `EUR`, `CLP`, `RMB`, `GBP`, `CAD`, `AUD`) using ASCII-safe symbols (`USD$`, `EUR`, etc.) and the measurement units referenced by the UI (`tonnes`, `kilograms`, `pounds`, `liters`, `cubic_meters`, `kilowatt_hours`). The setup script invokes the same seeder when `--seed-data` is provided and verifies the expected rows afterward, warning if any are missing or inactive.
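
The upsert behaviour described above typically reduces to `INSERT ... ON CONFLICT` executed in bulk. The following sketch shows the pattern with psycopg2's `execute_values`; the `currency` column names are assumptions inferred from the description, not the real schema.

```python
# Sketch of a currency upsert -- column names are assumptions.
import psycopg2
from psycopg2.extras import execute_values

ROWS = [("USD", "USD$"), ("EUR", "EUR"), ("CLP", "CLP"), ("GBP", "GBP")]

with psycopg2.connect(dbname="calminer") as conn, conn.cursor() as cur:
    execute_values(
        cur,
        """
        INSERT INTO currency (code, symbol)
        VALUES %s
        ON CONFLICT (code) DO UPDATE SET symbol = EXCLUDED.symbol
        """,
        ROWS,
    )
# Re-running is a no-op apart from refreshing symbols, which is what makes
# repeated --seed-data invocations safe.
```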

### Rollback guidance

`scripts/setup_database.py` now tracks compensating actions when it creates the database or application role. If a later step fails, the script replays those rollback actions (dropping the newly created database or role and revoking grants) before exiting. Dry runs never register rollback steps and remain read-only.

If the script reports that some rollback steps could not complete—for example because a connection cannot be established—rerun the script with `--dry-run` to confirm the desired end state and then apply the outstanding cleanup manually:

```powershell
python scripts/setup_database.py --ensure-database --ensure-role --dry-run -v

# Manual cleanup examples when automation cannot connect
psql -d postgres -c "DROP DATABASE IF EXISTS calminer"
psql -d postgres -c "DROP ROLE IF EXISTS calminer"
```

After a failure and rollback, rerun the full setup once the environment issues are resolved.

@@ -1,6 +1,6 @@

---
title: '07 — Deployment View'
description: 'Describe deployment topology, infrastructure components, and environments (dev/stage/prod).'
status: draft
---

@@ -85,6 +85,14 @@ The development environment is set up for local development and testing. It incl

- Local PostgreSQL instance (docker compose recommended, script available at `docker-compose.postgres.yml`)
- FastAPI server running in debug mode

`docker-compose.dev.yml` encapsulates this topology:

- `api` service mounts the repository for live reloads (`uvicorn --reload`) and depends on the database health check.
- `db` service uses the Debian-based `postgres:16` image with UTF-8 locale configuration and persists data in `pg_data_dev`.
- A shared `calminer_backend` bridge network keeps traffic contained; ports 8000/5432 are published for local tooling.

See [docs/quickstart.md](../quickstart.md#compose-driven-development-stack) for command examples and volume maintenance tips.

### Testing Environment

The testing environment is set up for automated testing and quality assurance. It includes:

@@ -93,6 +101,14 @@ The testing environment is set up for automated testing and quality assurance. I

- FastAPI server running in testing mode
- Automated test suite (e.g., pytest) for running unit and integration tests

`docker-compose.test.yml` provisions an ephemeral CI-like stack:

- `tests` service builds the application image, installs `requirements-test.txt`, runs the database setup script (dry-run + apply), then executes pytest.
- `api` service is available on port 8001 for manual verification against the test database.
- `postgres` service seeds a disposable Postgres 16 instance with health checks and named volumes (`pg_data_test`, `pip_cache_test`).

Typical commands mirror the CI workflow (`docker compose -f docker-compose.test.yml run --rm tests`); the [quickstart](../quickstart.md#compose-driven-test-stack) lists variations and teardown steps.

### Production Environment

The production environment is set up for serving live traffic and includes:

@@ -102,6 +118,22 @@ The production environment is set up for serving live traffic and includes:

- Load balancer (Traefik) for distributing incoming requests
- Monitoring and logging tools for tracking application performance

#### Production docker compose topology

- `docker-compose.prod.yml` defines the runtime topology for operator-managed deployments.
- `api` service runs the FastAPI image with resource limits (`API_LIMIT_CPUS`, `API_LIMIT_MEMORY`) and a `/health` probe consumed by Traefik and the Compose health check.
- `traefik` service (enabled via the `reverse-proxy` profile) terminates TLS using the ACME resolver configured by `TRAEFIK_ACME_EMAIL` and routes `CALMINER_DOMAIN` traffic to the API.
- `postgres` service (enabled via the `local-db` profile) exists for edge deployments without managed PostgreSQL and persists data in the `pg_data_prod` volume while mounting `./backups` for operator snapshots.
- All services join the configurable `CALMINER_NETWORK` (defaults to `calminer_backend`) to keep traffic isolated from host networks.

Deployment workflow:

1. Copy `config/setup_production.env.example` to `config/setup_production.env` and populate domain, registry image tag, database credentials, and resource budgets.
2. Launch the stack with `docker compose --env-file config/setup_production.env -f docker-compose.prod.yml --profile reverse-proxy up -d` (append `--profile local-db` when hosting Postgres locally).
3. Run database migrations and seeding using `docker compose --env-file config/setup_production.env -f docker-compose.prod.yml run --rm api python scripts/setup_database.py --run-migrations --seed-data`.
4. Monitor container health via `docker compose -f docker-compose.prod.yml ps` or Traefik dashboards; the API health endpoint returns `{ "status": "ok" }` when ready (a minimal handler sketch follows this list).
5. Shut down with `docker compose -f docker-compose.prod.yml down` (volumes persist unless `-v` is supplied).
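
The `/health` contract referenced in step 4 is small enough to show in full; this is a sketch of the expected behavior, with the route's placement in `main.py` assumed:

```python
# Minimal sketch of the /health probe described above.
from fastapi import FastAPI

app = FastAPI()


@app.get("/health")
def health() -> dict[str, str]:
    # Traefik and the Compose health check only need HTTP 200 with this body.
    return {"status": "ok"}
```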

## Containerized Deployment Flow

The Docker-based deployment path aligns with the solution strategy documented in [Solution Strategy](04_solution_strategy.md) and the CI practices captured in [Testing & CI](07_deployment/07_01_testing_ci.md.md).

@@ -22,7 +22,7 @@ This document outlines the proposed user roles and permissions model for the Cal

## Authentication System

The authentication system uses JWT (JSON Web Tokens) for securing API endpoints. Users can register with a username, email, and password. Passwords are hashed using a `passlib` CryptContext for secure, configurable hashing. Upon successful login, an access token is issued, which must be included in subsequent requests for protected resources.
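
A `passlib` CryptContext setup generally looks like the sketch below; the bcrypt scheme choice and helper names are illustrative rather than CalMiner's actual configuration.

```python
# Illustrative password hashing helpers -- scheme selection is an assumption.
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


def hash_password(plain: str) -> str:
    return pwd_context.hash(plain)


def verify_password(plain: str, hashed: str) -> bool:
    # verify() also recognises hashes from schemes marked deprecated, which is
    # what makes the hashing "configurable": schemes can rotate over time.
    return pwd_context.verify(plain, hashed)
```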

## Key Components

@@ -1,6 +1,6 @@

---
title: 'CalMiner Architecture Documentation'
description: 'arc42-based architecture documentation for the CalMiner project'
---

# Architecture documentation (arc42 mapping)

@@ -11,16 +11,32 @@ This folder mirrors the arc42 chapter structure (adapted to Markdown).

- [01 Introduction and Goals](01_introduction_and_goals.md)
- [02 Architecture Constraints](02_architecture_constraints.md)
  - [02_01 Technical Constraints](02_constraints/02_01_technical_constraints.md)
  - [02_02 Organizational Constraints](02_constraints/02_02_organizational_constraints.md)
  - [02_03 Regulatory Constraints](02_constraints/02_03_regulatory_constraints.md)
  - [02_04 Environmental Constraints](02_constraints/02_04_environmental_constraints.md)
  - [02_05 Performance Constraints](02_constraints/02_05_performance_constraints.md)
- [03 Context and Scope](03_context_and_scope.md)
  - [03_01 Architecture Scope](03_scope/03_01_architecture_scope.md)
- [04 Solution Strategy](04_solution_strategy.md)
  - [04_01 Client-Server Architecture](04_strategy/04_01_client_server_architecture.md)
  - [04_02 Technology Choices](04_strategy/04_02_technology_choices.md)
  - [04_03 Trade-offs](04_strategy/04_03_trade_offs.md)
  - [04_04 Future Considerations](04_strategy/04_04_future_considerations.md)
- [05 Building Block View](05_building_block_view.md)
  - [05_01 Architecture Overview](05_blocks/05_01_architecture_overview.md)
  - [05_02 Backend Components](05_blocks/05_02_backend_components.md)
  - [05_03 Frontend Components](05_blocks/05_03_frontend_components.md)
  - [05_03 Theming](05_blocks/05_03_theming.md)
  - [05_04 Middleware & Utilities](05_blocks/05_04_middleware_utilities.md)
- [06 Runtime View](06_runtime_view.md)
- [07 Deployment View](07_deployment_view.md)
  - [07_01 Testing & CI](07_deployment/07_01_testing_ci.md.md)
  - [07_02 Database](07_deployment/07_02_database.md)
- [08 Concepts](08_concepts.md)
  - [08_01 Security](08_concepts/08_01_security.md)
  - [08_02 Data Models](08_concepts/08_02_data_models.md)
- [09 Architecture Decisions](09_architecture_decisions.md)
- [10 Quality Requirements](10_quality_requirements.md)
- [11 Technical Risks](11_technical_risks.md)
- [12 Glossary](12_glossary.md)

@@ -1,50 +1,77 @@

# Development Environment Setup

This document outlines the local development environment and steps to get the project running.

## Prerequisites

- Python (version 3.11+)
- PostgreSQL (version 13+)
- Git
- Docker and Docker Compose (optional, for containerized development)

## Clone and Project Setup

```powershell
# Clone the repository
git clone https://git.allucanget.biz/allucanget/calminer.git
cd calminer
```

## Development with Docker Compose (Recommended)

For a quick setup without installing PostgreSQL locally, use Docker Compose:

```powershell
# Start services
docker-compose up

# The app will be available at http://localhost:8000
# Database is automatically set up
```

To run in background:

```powershell
docker-compose up -d
```

To stop:

```powershell
docker-compose down
```

## Manual Development Setup

### Virtual Environment

```powershell
# Create and activate a virtual environment
python -m venv .venv
.\.venv\Scripts\Activate.ps1
```

### Install Dependencies

```powershell
pip install -r requirements.txt
```

### Database Setup

1. Create database user:

```sql
CREATE USER calminer_user WITH PASSWORD 'your_password';
```

1. Create database:

```sql
CREATE DATABASE calminer;
```

### Environment Variables

1. Copy `.env.example` to `.env` at project root.
1. Edit `.env` to set database connection details:
@@ -57,21 +84,21 @@ DATABASE_USER=calminer_user
DATABASE_PASSWORD=your_password
DATABASE_NAME=calminer
DATABASE_SCHEMA=public
```

1. The application uses `python-dotenv` to load these variables. A legacy `DATABASE_URL` value is still accepted if the granular keys are omitted (see the sketch below).
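
One way the granular variables can be assembled into a connection URL, sketched with the legacy fallback included (the real logic in `config/database.py` may differ):

```python
# Illustrative assembly of the database URL from granular variables.
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the project root


def database_url() -> str:
    legacy = os.getenv("DATABASE_URL")
    if legacy:  # legacy fallback still honoured
        return legacy
    return (
        f"{os.environ['DATABASE_DRIVER']}://{os.environ['DATABASE_USER']}"
        f":{os.environ['DATABASE_PASSWORD']}@{os.environ['DATABASE_HOST']}"
        f":{os.environ['DATABASE_PORT']}/{os.environ['DATABASE_NAME']}"
    )
```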

### Running the Application

```powershell
# Start the FastAPI server
uvicorn main:app --reload
```

## Testing

```powershell
pytest
```

E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests (a fixture sketch follows).
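
A session-scoped fixture of that kind typically wraps uvicorn in a background thread; the sketch below is one plausible implementation, not the project's actual conftest.

```python
# Sketch of a session-scoped live server fixture -- illustrative only.
import threading
import time

import pytest
import uvicorn

from main import app


@pytest.fixture(scope="session")
def live_server():
    config = uvicorn.Config(app, host="127.0.0.1", port=8001, log_level="warning")
    server = uvicorn.Server(config)
    thread = threading.Thread(target=server.run, daemon=True)
    thread.start()
    while not server.started:  # wait for uvicorn to bind the port
        time.sleep(0.05)
    yield "http://localhost:8001"
    server.should_exit = True
    thread.join(timeout=5)
```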
@@ -1,8 +1,6 @@

# UI, templates and styling

This document outlines the UI structure, template components, CSS variable conventions, and per-page data/actions for the CalMiner application.

## Reusable Template Components

@@ -1,31 +0,0 @@

# Setup Script Idempotency Audit (2025-10-25)

This note captures the current evaluation of idempotent behaviour for `scripts/setup_database.py` and outlines follow-up actions.

## Admin Tasks

- **ensure_database**: guarded by `SELECT 1 FROM pg_database`; re-runs safely. Failure mode: network issues or lack of privileges surface as psycopg2 errors without additional context.
- **ensure_role**: checks `pg_roles`, creates the role if missing, and reapplies grants each time. Subsequent runs execute the grants again, but PostgreSQL tolerates repeated grants.
- **ensure_schema**: uses an `information_schema` guard and respects `--dry-run`; idempotent when the schema is `public` or already present.

## Application Tasks

- **initialize_schema**: relies on SQLAlchemy `create_all(checkfirst=True)`; repeatable. Dry-run output remains descriptive.
- **run_migrations**: the new baseline workflow applies `000_base.sql` once and records legacy scripts as applied. Subsequent runs detect the baseline in `schema_migrations` and skip reapplication.

## Seeding

- `seed_baseline_data` seeds currencies and measurement units with upsert logic. Verification now raises on missing data, preventing silent failures.
- Running `--seed-data` repeatedly performs `ON CONFLICT` updates, making the operation safe.

## Outstanding Risks

1. The baseline migration relies on legacy files being present when first executed; if they are removed beforehand, old entries are never marked. (Low risk given the repository state.)
2. `ensure_database` and `ensure_role` do not wrap SQL execution errors with additional context beyond psycopg2 messages.
3. Baseline verification assumes migrations and seeding run in the same process; manual runs of `scripts/seed_data.py` without the baseline could still fail.

## Recommended Actions

- Add regression tests ensuring repeated executions of key CLI paths (`--run-migrations`, `--seed-data`) result in no-op behaviour after the first run.
- Extend logging/error handling for admin operations to provide clearer messages on repeated failures.
- Consider a preflight check for when the migrations directory lacks legacy files but the baseline is pending, warning about potential drift.

@@ -1,29 +0,0 @@

# Setup Script Logging Audit (2025-10-25)

The following observations capture current logging behaviour in `scripts/setup_database.py` and highlight areas requiring improved error handling and messaging.

## Connection Validation

- `validate_admin_connection` and `validate_application_connection` log entry/exit messages and raise `RuntimeError` with context if the connection fails. This coverage is sufficient.
- `ensure_database` logs creation states but does not surface connection or SQL exceptions beyond the initial connection acquisition. When the inner `cursor.execute` calls fail, the exceptions bubble up without contextual logging.

## Migration Runner

- Lists pending migrations and logs each application attempt.
- When the baseline is pending, the script logs whether it is a dry run or a live application and records legacy file marking. However, if `_apply_migration_file` raises an exception, the caller re-raises after logging the failure; there is no wrapping message guiding users toward manual cleanup.
- Legacy migration marking happens silently (just info logs). Failures during the insert into `schema_migrations` would currently propagate without added guidance.

## Seeding Workflow

- `seed_baseline_data` announces each seeding phase and skips verification in dry-run mode with a log breadcrumb.
- `_verify_seeded_data` warns about missing currencies/units and inactive defaults but does **not** raise errors, meaning CI can pass while the database is incomplete. There is no explicit log when verification succeeds.
- `_seed_units` logs when the `measurement_unit` table is missing, which is helpful, but the warning is the only feedback; no exception is raised.

## Suggested Enhancements

1. Wrap baseline application and legacy marking in `try/except` blocks that log actionable remediation steps before re-raising (a sketch appears at the end of this note).
2. Promote seed verification failures (missing or inactive records) to exceptions so automated workflows fail fast; add success logs for clarity.
3. Add contextual logging around currency/measurement-unit insert failures, particularly around `execute_values` calls, to aid debugging malformed data.
4. Introduce structured logging (log codes or phases) for major steps (`CONNECT`, `MIGRATE`, `SEED`, `VERIFY`) to make scanning log files easier.

These findings inform the remaining TODO subtasks for enhanced error handling.
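
Enhancement 1 could take roughly the following shape; the function name, log code, and message wording are illustrative suggestions, not existing code.

```python
# Sketch for enhancement 1 -- wrap migration application with actionable context.
import logging

logger = logging.getLogger("setup_database")


def apply_with_context(apply_fn, path: str) -> None:
    """Run one migration file; on failure, log remediation steps, then re-raise."""
    try:
        apply_fn(path)
    except Exception:
        logger.exception(
            "MIGRATE: failed while applying %s. Inspect schema_migrations before "
            "re-running, and remove any partial objects the file may have created.",
            path,
        )
        raise
```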
@@ -1,248 +1,87 @@
-# Quickstart & Expanded Project Documentation
+# Developer Quickstart

-This document contains the expanded development, usage, testing, and migration guidance moved out of the top-level README for brevity.
+- [Developer Quickstart](#developer-quickstart)
+- [Development](#development)
+- [User Interface](#user-interface)
+- [Testing](#testing)
+- [Staging](#staging)
+- [Deployment](#deployment)
+- [Using Docker Compose](#using-docker-compose)
+- [Manual Docker Deployment](#manual-docker-deployment)
+- [Database Deployment \& Migrations](#database-deployment--migrations)
+- [Usage Overview](#usage-overview)
+- [Theme configuration](#theme-configuration)
+- [Where to look next](#where-to-look-next)
+
+This document provides a quickstart guide for developers to set up and run the CalMiner application locally.

 ## Development

-To get started locally:
+See [Development Setup](docs/developer/development_setup.md).

-```powershell
-# Clone the repository
-git clone https://git.allucanget.biz/allucanget/calminer.git
-cd calminer
-
-# Create and activate a virtual environment
-python -m venv .venv
-.\.venv\Scripts\Activate.ps1
-
-# Install dependencies
-pip install -r requirements.txt
-
-# Start the development server
-uvicorn main:app --reload
-```
+### User Interface
+
+There is a dedicated [UI and Style](docs/developer/ui_and_style.md) guide for frontend contributors.
+
+### Testing
+
+Testing is described in the [Testing CI](docs/architecture/07_deployment/07_01_testing_ci.md) document.
+
+## Staging
+
+Staging environment setup is covered in [Staging Environment Setup](docs/developer/staging_environment_setup.md).
+
+## Deployment
+
+The application can be deployed using Docker containers.
+
+### Using Docker Compose
+
+For production deployment, use the provided `docker-compose.yml`:
+
+```bash
+docker-compose up -d
+```
+
+This starts the FastAPI app and PostgreSQL database.

-## Docker-based setup
-
-To build and run the application using Docker instead of a local Python environment:
-
-```powershell
-# Build the application image (multi-stage build keeps runtime small)
-docker build -t calminer:latest .
-
-# Start the container on port 8000
-docker run --rm -p 8000:8000 calminer:latest
-
-# Supply environment variables (e.g., Postgres connection)
-docker run --rm -p 8000:8000 ^
-  -e DATABASE_DRIVER="postgresql" ^
-  -e DATABASE_HOST="db.host" ^
-  -e DATABASE_PORT="5432" ^
-  -e DATABASE_USER="calminer" ^
-  -e DATABASE_PASSWORD="s3cret" ^
-  -e DATABASE_NAME="calminer" ^
-  -e DATABASE_SCHEMA="public" ^
-  calminer:latest
-```
-
-If you maintain a Postgres or Redis dependency locally, consider authoring a `docker compose` stack that pairs them with the app container. The Docker image expects the database to be reachable and migrations executed before serving traffic.
+### Manual Docker Deployment
+
+Build and run the container manually:
+
+```bash
+docker build -t calminer .
+docker run -d -p 8000:8000 \
+  -e DATABASE_HOST=your-postgres-host \
+  -e DATABASE_USER=calminer \
+  -e DATABASE_PASSWORD=your-password \
+  -e DATABASE_NAME=calminer_db \
+  calminer
+```
+
+Ensure the database is set up and migrated before running.
+
+### Database Deployment & Migrations
+
+See the [Database Deployment & Migrations](docs/architecture/07_deployment/07_02_database_deployment_migrations.md) document for details on database deployment and migration strategies.

 ## Usage Overview

+- **Run the application**: Follow the [Development Setup](docs/developer/development_setup.md) to get the application running locally.
+- **Access the UI**: Open your web browser and navigate to `http://localhost:8000/ui` to access the user interface.
 - **API base URL**: `http://localhost:8000/api`
 - Key routes include creating scenarios, parameters, costs, consumption, production, equipment, maintenance, and reporting summaries. See the `routes/` directory for full details.
+- **UI base URL**: `http://localhost:8000/ui`

 ### Theme configuration

-- Open `/ui/settings` to access the Settings dashboard. The **Theme Colors** form lists every CSS variable persisted in the `application_setting` table. Updates apply immediately across the UI once saved.
-- Use the accompanying API endpoints for automation or integration tests:
-- `GET /api/settings/css` returns the active variables, defaults, and metadata describing any environment overrides.
-- `PUT /api/settings/css` accepts a payload such as `{"variables": {"--color-primary": "#112233"}}` and persists the change unless an environment override is in place.
-- Environment variables prefixed with `CALMINER_THEME_` win over database values. For example, setting `CALMINER_THEME_COLOR_PRIMARY="#112233"` renders the corresponding input read-only and surfaces the override in the Environment Overrides table.
-- Acceptable values include hex (`#rrggbb` or `#rrggbbaa`), `rgb()/rgba()`, and `hsl()/hsla()` expressions with the expected number of components. Invalid inputs trigger a validation error and the API responds with HTTP 422.
+Theming is laid out in [Theming](docs/architecture/05_03_theming.md).
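For automation against the endpoints described above, a minimal sketch using `httpx` (already listed in `requirements.txt`), assuming a server on `localhost:8000`:

```python
import httpx

BASE = "http://localhost:8000"

# Read the active theme variables plus defaults and override metadata.
current = httpx.get(f"{BASE}/api/settings/css").json()
print(current)

# Persist a new primary color; a 422 response signals an invalid color value,
# and environment overrides (CALMINER_THEME_*) win over this stored value.
resp = httpx.put(
    f"{BASE}/api/settings/css",
    json={"variables": {"--color-primary": "#112233"}},
)
resp.raise_for_status()
```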
-## Dashboard Preview
-
-1. Start the FastAPI server and navigate to `/`.
-2. Review the headline metrics, scenario snapshot table, and cost/activity charts sourced from the current database state.
-3. Use the "Refresh Dashboard" button to pull freshly aggregated data via `/ui/dashboard/data` without reloading the page.
-
-## Testing
-
-Run the unit test suite:
-
-```powershell
-pytest
-```
-
-E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.
-
-## Migrations & Baseline
-
-A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, provisions the `application_setting` store for configurable UI/system options, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.
-
-Configure granular database settings in your PowerShell session before running migrations:
-
-```powershell
-$env:DATABASE_DRIVER = 'postgresql'
-$env:DATABASE_HOST = 'localhost'
-$env:DATABASE_PORT = '5432'
-$env:DATABASE_USER = 'calminer'
-$env:DATABASE_PASSWORD = 's3cret'
-$env:DATABASE_NAME = 'calminer'
-$env:DATABASE_SCHEMA = 'public'
-
-python scripts/setup_database.py --run-migrations --seed-data --dry-run
-python scripts/setup_database.py --run-migrations --seed-data
-```
-
-The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.
-
-> ℹ️ When `--seed-data` is supplied without `--run-migrations`, the bootstrap script automatically applies any pending SQL migrations first so the `application_setting` table (and future settings-backed features) are present before seeding.
-
-> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
-
-## Database bootstrap workflow
-
-Provision or refresh a database instance with `scripts/setup_database.py`. Populate the required environment variables (an example lives at `config/setup_test.env.example`) and run:
-
-```powershell
-# Load test credentials (PowerShell)
-Get-Content .\config\setup_test.env.example |
-  ForEach-Object {
-    if ($_ -and -not $_.StartsWith('#')) {
-      $name, $value = $_ -split '=', 2
-      Set-Item -Path Env:$name -Value $value
-    }
-  }
-
-# Dry-run to inspect the planned actions
-python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
-
-# Execute the full workflow
-python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
-```
-
-Typical log output confirms:
-
-- Admin and application connections succeed for the supplied credentials.
-- Database and role creation are idempotent (`already present` when rerun).
-- SQLAlchemy metadata either reports missing tables or `All tables already exist`.
-- Migrations list pending files and finish with `Applied N migrations` (a new database reports `Applied 1 migrations` for `000_base.sql`).
-
-After a successful run the target database contains all application tables plus `schema_migrations`, and that table records each applied migration file. New installations only record `000_base.sql`; upgraded environments retain historical entries alongside the baseline.
-
-### Local Postgres via Docker Compose
-
-For local validation without installing Postgres directly, use the provided compose file:
-
-```powershell
-docker compose -f docker-compose.postgres.yml up -d
-```
-
-#### Summary
-
-1. Start the Postgres container with `docker compose -f docker-compose.postgres.yml up -d`.
-2. Export the granular database environment variables (host `127.0.0.1`, port `5433`, database `calminer_local`, user/password `calminer`/`secret`).
-3. Run the setup script twice: first with `--dry-run` to preview actions, then without it to apply changes.
-4. When finished, stop and optionally remove the container/volume using `docker compose -f docker-compose.postgres.yml down`.
-
-The service exposes Postgres 16 on `localhost:5433` with database `calminer_local` and role `calminer`/`secret`. When the container is running, set the granular environment variables before invoking the setup script:
-
-```powershell
-$env:DATABASE_DRIVER = 'postgresql'
-$env:DATABASE_HOST = '127.0.0.1'
-$env:DATABASE_PORT = '5433'
-$env:DATABASE_USER = 'calminer'
-$env:DATABASE_PASSWORD = 'secret'
-$env:DATABASE_NAME = 'calminer_local'
-$env:DATABASE_SCHEMA = 'public'
-
-python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
-python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
-```
-
-When testing is complete, shut down the container (and optional persistent volume) with:
-
-```powershell
-docker compose -f docker-compose.postgres.yml down
-docker volume rm calminer_postgres_local_postgres_data # optional cleanup
-```
-
-### Seeding reference data
-
-`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:
-
-```powershell
-python scripts/seed_data.py --currencies --units --dry-run
-python scripts/seed_data.py --currencies --units
-```
-
-The seeder upserts the canonical currency catalog (`USD`, `EUR`, `CLP`, `RMB`, `GBP`, `CAD`, `AUD`) using ASCII-safe symbols (`USD$`, `EUR`, etc.) and the measurement units referenced by the UI (`tonnes`, `kilograms`, `pounds`, `liters`, `cubic_meters`, `kilowatt_hours`). The setup script invokes the same seeder when `--seed-data` is provided and verifies the expected rows afterward, warning if any are missing or inactive.
-
-### Rollback guidance
-
-`scripts/setup_database.py` now tracks compensating actions when it creates the database or application role. If a later step fails, the script replays those rollback actions (dropping the newly created database or role and revoking grants) before exiting. Dry runs never register rollback steps and remain read-only.
-
-If the script reports that some rollback steps could not complete (for example because a connection cannot be established), rerun the script with `--dry-run` to confirm the desired end state and then apply the outstanding cleanup manually:
-
-```powershell
-python scripts/setup_database.py --ensure-database --ensure-role --dry-run -v
-
-# Manual cleanup examples when automation cannot connect
-psql -d postgres -c "DROP DATABASE IF EXISTS calminer"
-psql -d postgres -c "DROP ROLE IF EXISTS calminer"
-```
-
-After a failure and rollback, rerun the full setup once the environment issues are resolved.
-
-### CI pipeline environment
-
-The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container and runs the setup script twice: once with `--dry-run` to validate the plan and again without it to apply migrations and seeds. No external secrets are required; the workflow sets the following environment variables for both invocations and for pytest:
-
-| Variable                      | Value         | Purpose                                           |
-| ----------------------------- | ------------- | ------------------------------------------------- |
-| `DATABASE_DRIVER`             | `postgresql`  | Signals the driver to the setup script            |
-| `DATABASE_HOST`               | `postgres`    | Hostname of the Postgres job service container    |
-| `DATABASE_PORT`               | `5432`        | Default service port                              |
-| `DATABASE_NAME`               | `calminer_ci` | Target database created by the workflow           |
-| `DATABASE_USER`               | `calminer`    | Application role used during tests                |
-| `DATABASE_PASSWORD`           | `secret`      | Password for both admin and app role              |
-| `DATABASE_SCHEMA`             | `public`      | Default schema for the tests                      |
-| `DATABASE_SUPERUSER`          | `calminer`    | Setup script uses the same role for admin actions |
-| `DATABASE_SUPERUSER_PASSWORD` | `secret`      | Matches the Postgres service password             |
-| `DATABASE_SUPERUSER_DB`       | `calminer_ci` | Database to connect to for admin operations       |
-
-The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally.
-
-Because the workflow provisions everything inline, no repository or organization secrets need to be configured for basic CI runs. If you later move the setup step to staging or production pipelines, replace these inline values with secrets managed by the CI platform. When running on self-hosted runners behind an HTTP proxy or apt cache, ensure Playwright dependencies and OS packages inherit the same proxy settings that the workflow configures prior to installing browsers.
-
-### Staging environment workflow
-
-Use the staging checklist in `docs/staging_environment_setup.md` when running the setup script against the shared environment. A sample variable file (`config/setup_staging.env`) records the expected inputs (host, port, admin/application roles); copy it outside the repository or load the values securely via your shell before executing the workflow.
-
-Recommended execution order:
-
-1. Dry run with `--dry-run -v` to confirm connectivity and review planned operations. Capture the output to `reports/setup_staging_dry_run.log` (or similar) for auditing.
-2. Execute the live run with the same flags minus `--dry-run` to provision the database, role grants, migrations, and seed data. Save the log as `reports/setup_staging_apply.log`.
-3. Repeat the dry run to verify idempotency and record the result (for example `reports/setup_staging_post_apply.log`).
-
-## Database Objects
-
-The database contains tables such as `capex`, `opex`, `chemical_consumption`, `fuel_consumption`, `water_consumption`, `scrap_consumption`, `production_output`, `equipment_operation`, `ore_batch`, `exchange_rate`, and `simulation_result`.
-
-## Current implementation status (2025-10-21)
-
-- Currency normalization: a `currency` table and backfill scripts exist; routes accept `currency_id` and `currency_code` for compatibility.
-- Simulation engine: scaffolding in `services/simulation.py` and `/api/simulations/run` return in-memory results; persistence to `models/simulation_result` is planned.
-- Reporting: `services/reporting.py` provides summary statistics used by `POST /api/reporting/summary`.
-- Tests & coverage: unit and E2E suites exist; recent local coverage is >90%.
-- Remaining work: authentication, persist simulation runs, CI/CD and containerization.

 ## Where to look next

 - Architecture overview & chapters: [architecture](architecture/README.md) (per-chapter files under `docs/architecture/`)
 - [Testing & CI](architecture/07_deployment/07_01_testing_ci.md)
-- [Development setup](architecture/15_development_setup.md)
+- [Development setup](developer/development_setup.md)
 - Implementation plan & roadmap: [Solution strategy](architecture/04_solution_strategy.md)
 - Routes: [routes](../routes/)
 - Services: [services](../services/)
37 docs/roadmap.md Normal file
@@ -0,0 +1,37 @@
# Roadmap

## Overview

## Scenario Enhancements

For each scenario, the goal is to evaluate financial viability, operational efficiency, and risk factors associated with the mining project. This data is used to perform calculations, generate reports, and visualize results through charts and dashboards, enabling users to make informed decisions based on comprehensive analysis.

### Scenario & Data Management

Scenarios are the core organizational unit within CalMiner, allowing users to create, manage, and analyze different mining project configurations. Each scenario encapsulates a unique set of parameters and data inputs that define the mining operation being modeled.

#### Scenario Creation

Users can create new scenarios by providing a unique name and description. The system will generate a new scenario with default parameters, which can be customized later.

#### Scenario Management

Users can manage existing scenarios by modifying their parameters, adding new data inputs, or deleting them as needed.

#### Data Inputs

Users can define and manage various data inputs for each scenario, including:

- **Geological Data**: Input data related to the geological characteristics of the mining site.
- **Operational Parameters**: Define parameters such as mining methods, equipment specifications, and workforce details.
- **Financial Data**: Input cost structures, revenue models, and financial assumptions.
- **Environmental Data**: Include data related to environmental impact, regulations, and sustainability practices.
- **Technical Data**: Specify technical parameters such as ore grades, recovery rates, and processing methods.
- **Social Data**: Incorporate social impact assessments, community engagement plans, and stakeholder analysis.
- **Regulatory Data**: Include data related to legal and regulatory requirements, permits, and compliance measures.
- **Market Data**: Input market conditions, commodity prices, and economic indicators that may affect the mining operation.
- **Risk Data**: Define risk factors, probabilities, and mitigation strategies for the mining project.
- **Logistical Data**: Include data related to transportation, supply chain management, and infrastructure requirements.
- **Maintenance Data**: Input maintenance schedules, costs, and equipment reliability metrics.
- **Human Resources Data**: Define workforce requirements, training programs, and labor costs.
- **Health and Safety Data**: Include data related to workplace safety protocols, incident rates, and health programs.
@@ -1,78 +0,0 @@
# Baseline Seed Data Plan

This document captures the datasets that should be present in a fresh CalMiner installation and the structure required to manage them through `scripts/seed_data.py`.

## Currency Catalog

The `currency` table already exists and is seeded today via `scripts/seed_data.py`. The goal is to keep the canonical list in one place and ensure the default currency (USD) is always active.

| Code | Name              | Symbol | Notes                                      |
| ---- | ----------------- | ------ | ------------------------------------------ |
| USD  | US Dollar         | $      | Default currency (`DEFAULT_CURRENCY_CODE`) |
| EUR  | Euro              | €      |                                            |
| CLP  | Chilean Peso      | $      |                                            |
| RMB  | Chinese Yuan      | ¥      |                                            |
| GBP  | British Pound     | £      |                                            |
| CAD  | Canadian Dollar   | $      |                                            |
| AUD  | Australian Dollar | $      |                                            |

Seeding behaviour:

- Upsert by ISO code; keep the existing name/symbol when updated manually.
- Ensure `is_active` remains true for USD and defaults to true for new rows.
- Defer to runtime validation in `routes.currencies` for enforcing default behaviour.

## Measurement Units

UI routes (`routes/ui.py`) currently rely on the in-memory `MEASUREMENT_UNITS` list to populate dropdowns for consumption and production forms. To make this configurable and available to the API, introduce a dedicated `measurement_unit` table and seed it.

Proposed schema:

| Column     | Type            | Notes                                  |
| ---------- | --------------- | -------------------------------------- |
| id         | SERIAL / BIGINT | Primary key.                           |
| code       | TEXT            | Stable slug (e.g. `tonnes`). Unique.   |
| name       | TEXT            | Display label.                         |
| symbol     | TEXT            | Short symbol (nullable).               |
| unit_type  | TEXT            | Category (`mass`, `volume`, `energy`). |
| is_active  | BOOLEAN         | Default `true` for soft disabling.     |
| created_at | TIMESTAMP       | Optional `NOW()` default.              |
| updated_at | TIMESTAMP       | Optional `NOW()` trigger/default.      |
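Rendered as DDL, the proposed schema might look like the following sketch (an illustration, not a shipped migration; the concrete types and defaults are assumptions until a migration pins them down):

```python
import psycopg2

MEASUREMENT_UNIT_DDL = """
CREATE TABLE IF NOT EXISTS measurement_unit (
    id SERIAL PRIMARY KEY,
    code TEXT UNIQUE NOT NULL,                -- stable slug, e.g. 'tonnes'
    name TEXT NOT NULL,                       -- display label
    symbol TEXT,                              -- short symbol, nullable
    unit_type TEXT NOT NULL,                  -- 'mass', 'volume', 'energy'
    is_active BOOLEAN NOT NULL DEFAULT TRUE,  -- soft disabling
    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP NOT NULL DEFAULT NOW()
);
"""


def create_measurement_unit_table(dsn: str) -> None:
    """Apply the sketched DDL; in the repo this belongs in a migration file."""
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cursor:
            cursor.execute(MEASUREMENT_UNIT_DDL)
```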
Initial seed set (mirrors the existing UI list plus type categorisation):

| Code           | Name           | Symbol | Unit Type |
| -------------- | -------------- | ------ | --------- |
| tonnes         | Tonnes         | t      | mass      |
| kilograms      | Kilograms      | kg     | mass      |
| pounds         | Pounds         | lb     | mass      |
| liters         | Liters         | L      | volume    |
| cubic_meters   | Cubic Meters   | m3     | volume    |
| kilowatt_hours | Kilowatt Hours | kWh    | energy    |

Seeding behaviour:

- Upsert rows by `code`.
- Preserve `unit_type` and `symbol` unless explicitly changed via administration tooling.
- Continue surfacing unit options to the UI by querying this table instead of the static constant.

## Default Settings

The application expects certain defaults to exist:

- **Default currency**: enforced by `routes.currencies._ensure_default_currency`; ensure seeds keep USD active.
- **Fallback measurement unit**: the UI currently auto-selects the first option in the list. Once units move to the database, expose an application setting to choose a fallback (future work tracked under "Application Settings management").

## Seeding Structure Updates

To support the datasets above:

1. Extend `scripts/seed_data.py` with a `SeedDataset` registry so each dataset (currencies, units, future defaults) can declare its loader/upsert function and optional dependencies (see the sketch after this plan).
2. Add a `--dataset` CLI selector for targeted seeding while keeping `--all` as the default for `setup_database.py` integrations.
3. Update `scripts/setup_database.py` to:
   - Run the migration ensuring the `measurement_unit` table exists.
   - Execute the unit seeder after currencies when `--seed-data` is supplied.
   - Verify post-seed counts, logging which dataset was inserted/updated.
4. Adjust UI routes to load measurement units from the database and remove the hard-coded list once the table is available.

This plan aligns with the TODO item for seeding initial data and lays the groundwork for consolidating migrations around a single baseline file that introduces both the schema and seed data in an idempotent manner.
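A minimal sketch of the registry idea from item 1, under the assumption that each dataset exposes an upsert callable; `SeedDataset`, `REGISTRY`, and `run_datasets` are illustrative names, not the current `scripts/seed_data.py` API:

```python
from dataclasses import dataclass, field
from typing import Callable, Sequence


@dataclass(frozen=True)
class SeedDataset:
    """One seedable dataset: a name, an upsert function, and its dependencies."""

    name: str
    upsert: Callable[[object], None]  # receives an open DB cursor
    depends_on: Sequence[str] = field(default_factory=tuple)


REGISTRY: dict[str, SeedDataset] = {}


def register(dataset: SeedDataset) -> None:
    REGISTRY[dataset.name] = dataset


def run_datasets(cursor, names: Sequence[str]) -> None:
    """Run the requested datasets, resolving declared dependencies first."""
    done: set[str] = set()

    def run(name: str) -> None:
        if name in done:
            return
        dataset = REGISTRY[name]
        for dep in dataset.depends_on:
            run(dep)
        dataset.upsert(cursor)
        done.add(name)

    for name in names:
        run(name)
```

A `--dataset units` flag would then resolve to `run_datasets(cursor, ["units"])`, pulling in `currencies` automatically if it is declared as a dependency.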
5 main.py
@@ -32,6 +32,11 @@ async def json_validation(
    return await validate_json(request, call_next)


+@app.get("/health", summary="Container health probe")
+async def health() -> dict[str, str]:
+    return {"status": "ok"}


app.mount("/static", StaticFiles(directory="static"), name="static")

# Include API routers
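A quick smoke test for the new probe, assuming the app is served locally on port 8000 (`httpx` is already a dependency in `requirements.txt`):

```python
import httpx

# The container health probe should answer 200 with {"status": "ok"}.
response = httpx.get("http://localhost:8000/health")
assert response.status_code == 200
assert response.json() == {"status": "ok"}
```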
@@ -1,6 +1,7 @@
+playwright
 pytest
 pytest-cov
 pytest-httpx
-playwright
 pytest-playwright
+python-jose
 ruff
@@ -1,4 +1,5 @@
 fastapi
+pydantic>=2.0,<3.0
 uvicorn
 sqlalchemy
 psycopg2-binary
@@ -7,3 +8,5 @@ httpx
 jinja2
 pandas
 numpy
+passlib
+python-jose
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import List, Optional

from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel, ConfigDict, Field, field_validator
@@ -101,7 +101,7 @@ class ThemeSettings(BaseModel):
@router.post("/theme")
async def update_theme(theme_data: ThemeSettings, db: Session = Depends(get_db)):
    data_dict = theme_data.model_dump()
-    saved = save_theme_settings(db, data_dict)
+    save_theme_settings(db, data_dict)
    return {"message": "Theme updated", "theme": data_dict}
@@ -1,40 +1,21 @@
from fastapi import APIRouter, Depends, HTTPException, status
-from fastapi.security import OAuth2PasswordBearer
from sqlalchemy.orm import Session

from config.database import get_db
from models.user import User
-from services.security import get_password_hash, verify_password, create_access_token, SECRET_KEY, ALGORITHM
-from jose import jwt, JWTError
-from schemas.user import UserCreate, UserInDB, UserLogin, UserUpdate, PasswordResetRequest, PasswordReset, Token
+from services.security import create_access_token, get_current_user
+from schemas.user import (
+    PasswordReset,
+    PasswordResetRequest,
+    UserCreate,
+    UserInDB,
+    UserLogin,
+    UserUpdate,
+)

router = APIRouter(prefix="/users", tags=["users"])

-
-oauth2_scheme = OAuth2PasswordBearer(tokenUrl="users/login")
-
-
-async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)):
-    credentials_exception = HTTPException(
-        status_code=status.HTTP_401_UNAUTHORIZED,
-        detail="Could not validate credentials",
-        headers={"WWW-Authenticate": "Bearer"},
-    )
-    try:
-        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
-        username: str = payload.get("sub")
-        if username is None:
-            raise credentials_exception
-        if username is None:
-            raise credentials_exception
-    except JWTError:
-        raise credentials_exception
-    user = db.query(User).filter(User.username == username).first()
-    if user is None:
-        raise credentials_exception
-    return user
-

@router.post("/register", response_model=UserInDB, status_code=status.HTTP_201_CREATED)
async def register_user(user: UserCreate, db: Session = Depends(get_db)):
    db_user = db.query(User).filter(User.username == user.username).first()
@@ -87,7 +68,7 @@ async def update_user_me(user_update: UserUpdate, current_user: User = Depends(g
    if existing_user:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Username already taken")
-    current_user.username = user_update.username
+    setattr(current_user, "username", user_update.username)

    if user_update.email and user_update.email != current_user.email:
        existing_user = db.query(User).filter(
@@ -95,7 +76,7 @@ async def update_user_me(user_update: UserUpdate, current_user: User = Depends(g
    if existing_user:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
-    current_user.email = user_update.email
+    setattr(current_user, "email", user_update.email)

    if user_update.password:
        current_user.set_password(user_update.password)
@@ -43,15 +43,14 @@ def backfill(
    engine = create_engine(db_url)
    with engine.begin() as conn:
        # Ensure currency table exists
-        res = (
+        if db_url.startswith("sqlite:"):
            conn.execute(
                text(
                    "SELECT name FROM sqlite_master WHERE type='table' AND name='currency';"
                )
            )
-            if db_url.startswith("sqlite:")
-            else conn.execute(text("SELECT to_regclass('public.currency');"))
-        )
+        else:
+            conn.execute(text("SELECT to_regclass('public.currency');"))
        # Note: we don't strictly depend on the above - we assume migration was already applied

        # Helper: find or create currency by code
@@ -158,4 +158,32 @@ ALTER TABLE capex
ALTER TABLE opex
DROP COLUMN IF EXISTS currency_code;

+-- Role-based access control tables
+CREATE TABLE IF NOT EXISTS roles (
+    id SERIAL PRIMARY KEY,
+    name VARCHAR(255) UNIQUE NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS users (
+    id SERIAL PRIMARY KEY,
+    username VARCHAR(255) UNIQUE NOT NULL,
+    email VARCHAR(255) UNIQUE NOT NULL,
+    hashed_password VARCHAR(255) NOT NULL,
+    role_id INTEGER NOT NULL REFERENCES roles (id) ON DELETE RESTRICT
+);
+
+CREATE INDEX IF NOT EXISTS ix_users_username ON users (username);
+CREATE INDEX IF NOT EXISTS ix_users_email ON users (email);
+
+-- Theme settings configuration table
+CREATE TABLE IF NOT EXISTS theme_settings (
+    id SERIAL PRIMARY KEY,
+    theme_name VARCHAR(255) UNIQUE NOT NULL,
+    primary_color VARCHAR(7) NOT NULL,
+    secondary_color VARCHAR(7) NOT NULL,
+    accent_color VARCHAR(7) NOT NULL,
+    background_color VARCHAR(7) NOT NULL,
+    text_color VARCHAR(7) NOT NULL
+);
+
COMMIT;
@@ -1,25 +0,0 @@
-- Migration: Create application_setting table for configurable application options
-- Date: 2025-10-25
-- Description: Introduces persistent storage for application-level settings such as theme colors.

BEGIN;

CREATE TABLE IF NOT EXISTS application_setting (
    id SERIAL PRIMARY KEY,
    key VARCHAR(128) NOT NULL UNIQUE,
    value TEXT NOT NULL,
    value_type VARCHAR(32) NOT NULL DEFAULT 'string',
    category VARCHAR(32) NOT NULL DEFAULT 'general',
    description TEXT,
    is_editable BOOLEAN NOT NULL DEFAULT TRUE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE UNIQUE INDEX IF NOT EXISTS ux_application_setting_key
    ON application_setting (key);

CREATE INDEX IF NOT EXISTS ix_application_setting_category
    ON application_setting (category);

COMMIT;
@@ -1,11 +0,0 @@
-- Migration: 20251027_create_theme_settings_table.sql

CREATE TABLE theme_settings (
    id SERIAL PRIMARY KEY,
    theme_name VARCHAR(255) UNIQUE NOT NULL,
    primary_color VARCHAR(7) NOT NULL,
    secondary_color VARCHAR(7) NOT NULL,
    accent_color VARCHAR(7) NOT NULL,
    background_color VARCHAR(7) NOT NULL,
    text_color VARCHAR(7) NOT NULL
);
@@ -1,15 +0,0 @@
-- Migration: 20251027_create_user_and_role_tables.sql

CREATE TABLE roles (
    id SERIAL PRIMARY KEY,
    name VARCHAR(255) UNIQUE NOT NULL
);

CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    username VARCHAR(255) UNIQUE NOT NULL,
    email VARCHAR(255) UNIQUE NOT NULL,
    hashed_password VARCHAR(255) NOT NULL,
    role_id INTEGER NOT NULL,
    FOREIGN KEY (role_id) REFERENCES roles(id)
);
@@ -16,8 +16,7 @@ from __future__ import annotations

import argparse
import logging
-import os
-from typing import Iterable, Optional
+from typing import Optional

import psycopg2
from psycopg2 import errors
@@ -135,24 +134,36 @@ def run_with_namespace(
    *,
    config: Optional[DatabaseConfig] = None,
) -> None:
+    if not hasattr(args, "verbose"):
+        args.verbose = 0
+    if not hasattr(args, "dry_run"):
+        args.dry_run = False
+
    _configure_logging(args)

-    if not any((args.currencies, args.units, args.theme, args.defaults)):
+    currencies = bool(getattr(args, "currencies", False))
+    units = bool(getattr(args, "units", False))
+    theme = bool(getattr(args, "theme", False))
+    defaults = bool(getattr(args, "defaults", False))
+    dry_run = bool(getattr(args, "dry_run", False))
+
+    if not any((currencies, units, theme, defaults)):
        logger.info("No seeding options provided; exiting")
        return

    config = config or DatabaseConfig.from_env()

    with psycopg2.connect(config.application_dsn()) as conn:
        conn.autocommit = True
        with conn.cursor() as cursor:
-            if args.currencies:
-                _seed_currencies(cursor, dry_run=args.dry_run)
-            if args.units:
-                _seed_units(cursor, dry_run=args.dry_run)
-            if args.theme:
-                _seed_theme(cursor, dry_run=args.dry_run)
-            if args.defaults:
-                _seed_defaults(cursor, dry_run=args.dry_run)
+            if currencies:
+                _seed_currencies(cursor, dry_run=dry_run)
+            if units:
+                _seed_units(cursor, dry_run=dry_run)
+            if theme:
+                _seed_theme(cursor, dry_run=dry_run)
+            if defaults:
+                _seed_defaults(cursor, dry_run=dry_run)


def _seed_currencies(cursor, *, dry_run: bool) -> None:
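Because the flags are now read with `getattr` defaults, callers may pass a partial namespace; a sketch of such a call (assuming the granular `DATABASE_*` environment variables are set so `DatabaseConfig.from_env()` succeeds):

```python
import argparse

from scripts.seed_data import run_with_namespace

# Only currencies and units are requested; theme/defaults/dry_run fall back
# to False via getattr, and verbose falls back to 0 via the hasattr guard.
args = argparse.Namespace(currencies=True, units=True)
run_with_namespace(args)
```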
@@ -22,6 +22,7 @@ connection string; this script will still honor the granular inputs above.
"""

from __future__ import annotations
+from config.database import Base
import argparse
import importlib
import logging
@@ -43,7 +44,6 @@ from sqlalchemy import create_engine, inspect
ROOT_DIR = Path(__file__).resolve().parents[1]
if str(ROOT_DIR) not in sys.path:
    sys.path.insert(0, str(ROOT_DIR))
-from config.database import Base

logger = logging.getLogger(__name__)
@@ -126,7 +126,8 @@ class DatabaseConfig:
        ]
        if missing:
            raise RuntimeError(
-                "Missing required database configuration: " + ", ".join(missing)
+                "Missing required database configuration: " +
+                ", ".join(missing)
            )

        host = cast(str, host)
@@ -249,7 +250,7 @@ class DatabaseSetup:
        descriptor = self._describe_connection(
            self.config.admin_user, self.config.admin_database
        )
-        logger.info("Validating admin connection (%s)", descriptor)
+        logger.info("[CONNECT] Validating admin connection (%s)", descriptor)
        try:
            with self._admin_connection(self.config.admin_database) as conn:
                with conn.cursor() as cursor:
@@ -260,13 +261,14 @@ class DatabaseSetup:
                "Check DATABASE_ADMIN_URL or DATABASE_SUPERUSER settings."
                f" Target: {descriptor}"
            ) from exc
-        logger.info("Admin connection verified (%s)", descriptor)
+        logger.info("[CONNECT] Admin connection verified (%s)", descriptor)

    def validate_application_connection(self) -> None:
        descriptor = self._describe_connection(
            self.config.user, self.config.database
        )
-        logger.info("Validating application connection (%s)", descriptor)
+        logger.info(
+            "[CONNECT] Validating application connection (%s)", descriptor)
        try:
            with self._application_connection() as conn:
                with conn.cursor() as cursor:
@@ -277,7 +279,8 @@ class DatabaseSetup:
                "Ensure the role exists and credentials are correct. "
                f"Target: {descriptor}"
            ) from exc
-        logger.info("Application connection verified (%s)", descriptor)
+        logger.info(
+            "[CONNECT] Application connection verified (%s)", descriptor)

    def ensure_database(self) -> None:
        """Create the target database when it does not already exist."""
@@ -340,7 +343,8 @@ class DatabaseSetup:
                rollback_label = f"drop database {self.config.database}"
                self._register_rollback(
                    rollback_label,
-                    lambda db=self.config.database: self._drop_database(db),
+                    lambda db=self.config.database: self._drop_database(
+                        db),
                )
                logger.info("Created database '%s'", self.config.database)
        finally:
@@ -409,7 +413,8 @@ class DatabaseSetup:
            rollback_label = f"drop role {self.config.user}"
            self._register_rollback(
                rollback_label,
-                lambda role=self.config.user: self._drop_role(role),
+                lambda role=self.config.user: self._drop_role(
+                    role),
            )
        else:
            logger.info("Role '%s' already present", self.config.user)
@@ -583,31 +588,28 @@ class DatabaseSetup:
        except RuntimeError:
            raise

+    def _connect(self, dsn: str, descriptor: str) -> PGConnection:
+        try:
+            return psycopg2.connect(dsn)
+        except psycopg2.Error as exc:
+            raise RuntimeError(
+                f"Unable to establish connection. Target: {descriptor}"
+            ) from exc
+
    def _admin_connection(self, database: Optional[str] = None) -> PGConnection:
        target_db = database or self.config.admin_database
        dsn = self.config.admin_dsn(database)
        descriptor = self._describe_connection(
            self.config.admin_user, target_db
        )
-        try:
-            return psycopg2.connect(dsn)
-        except psycopg2.Error as exc:
-            raise RuntimeError(
-                "Unable to establish admin connection. " f"Target: {descriptor}"
-            ) from exc
+        return self._connect(dsn, descriptor)

    def _application_connection(self) -> PGConnection:
        dsn = self.config.application_dsn()
        descriptor = self._describe_connection(
            self.config.user, self.config.database
        )
-        try:
-            return psycopg2.connect(dsn)
-        except psycopg2.Error as exc:
-            raise RuntimeError(
-                "Unable to establish application connection. "
-                f"Target: {descriptor}"
-            ) from exc
+        return self._connect(dsn, descriptor)

    def initialize_schema(self) -> None:
        """Create database objects from SQLAlchemy metadata if missing."""
@@ -701,63 +703,9 @@ class DatabaseSetup:
                    cursor, schema_name
                )

-                if baseline_path.exists() and baseline_name not in applied:
-                    if self.dry_run:
-                        logger.info(
-                            "Dry run: baseline migration '%s' pending; would apply and mark legacy files",
-                            baseline_name,
-                        )
-                    else:
-                        logger.info(
-                            "Baseline migration '%s' pending; applying and marking older migrations",
-                            baseline_name,
-                        )
-                        try:
-                            baseline_applied = self._apply_migration_file(
-                                cursor, schema_name, baseline_path
-                            )
-                        except Exception:
-                            logger.error(
-                                "Failed while applying baseline migration '%s'."
-                                " Review the migration contents and rerun with --dry-run for diagnostics.",
-                                baseline_name,
-                                exc_info=True,
-                            )
-                            raise
-                        applied.add(baseline_applied)
-                        legacy_files = [
-                            path
-                            for path in migration_files
-                            if path.name != baseline_name
-                        ]
-                        for legacy in legacy_files:
-                            if legacy.name not in applied:
-                                try:
-                                    cursor.execute(
-                                        sql.SQL(
-                                            "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
-                                        ).format(
-                                            sql.Identifier(
-                                                schema_name,
-                                                MIGRATIONS_TABLE,
-                                            )
-                                        ),
-                                        (legacy.name,),
-                                    )
-                                except Exception:
-                                    logger.error(
-                                        "Unable to record legacy migration '%s' after baseline application."
-                                        " Check schema_migrations table in schema '%s' for partial state.",
-                                        legacy.name,
-                                        schema_name,
-                                        exc_info=True,
-                                    )
-                                    raise
-                                applied.add(legacy.name)
-                                logger.info(
-                                    "Marked legacy migration '%s' as applied via baseline",
-                                    legacy.name,
-                                )
+                self._handle_baseline_migration(
+                    cursor, schema_name, baseline_path, baseline_name, migration_files, applied
+                )

                pending = [
                    path for path in migration_files if path.name not in applied
@@ -781,6 +729,85 @@ class DatabaseSetup:
        logger.info("Applied %d migrations", len(pending))

+    def _handle_baseline_migration(
+        self,
+        cursor: extensions.cursor,
+        schema_name: str,
+        baseline_path: Path,
+        baseline_name: str,
+        migration_files: list[Path],
+        applied: set[str],
+    ) -> None:
+        if baseline_path.exists() and baseline_name not in applied:
+            if self.dry_run:
+                logger.info(
+                    "Dry run: baseline migration '%s' pending; would apply and mark legacy files",
+                    baseline_name,
+                )
+            else:
+                logger.info(
+                    "[MIGRATE] Baseline migration '%s' pending; applying and marking older migrations",
+                    baseline_name,
+                )
+                try:
+                    baseline_applied = self._apply_migration_file(
+                        cursor, schema_name, baseline_path
+                    )
+                except Exception:
+                    logger.error(
+                        "Failed while applying baseline migration '%s'."
+                        " Review the migration contents and rerun with --dry-run for diagnostics.",
+                        baseline_name,
+                        exc_info=True,
+                    )
+                    raise
+                applied.add(baseline_applied)
+                self._mark_legacy_migrations_as_applied(
+                    cursor, schema_name, migration_files, baseline_name, applied
+                )
+
+    def _mark_legacy_migrations_as_applied(
+        self,
+        cursor: extensions.cursor,
+        schema_name: str,
+        migration_files: list[Path],
+        baseline_name: str,
+        applied: set[str],
+    ) -> None:
+        legacy_files = [
+            path
+            for path in migration_files
+            if path.name != baseline_name
+        ]
+        for legacy in legacy_files:
+            if legacy.name not in applied:
+                try:
+                    cursor.execute(
+                        sql.SQL(
+                            "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
+                        ).format(
+                            sql.Identifier(
+                                schema_name,
+                                MIGRATIONS_TABLE,
+                            )
+                        ),
+                        (legacy.name,),
+                    )
+                except Exception:
+                    logger.error(
+                        "Unable to record legacy migration '%s' after baseline application."
+                        " Check schema_migrations table in schema '%s' for partial state.",
+                        legacy.name,
+                        schema_name,
+                        exc_info=True,
+                    )
+                    raise
+                applied.add(legacy.name)
+                logger.info(
+                    "Marked legacy migration '%s' as applied via baseline",
+                    legacy.name,
+                )
+
    def _apply_migration_file(
        self,
        cursor,
@@ -839,14 +866,23 @@ class DatabaseSetup:
        seed_args = argparse.Namespace(
            currencies=True,
            units=True,
+            theme=True,
            defaults=False,
            dry_run=dry_run,
            verbose=0,
        )
-        seed_data.run_with_namespace(seed_args, config=self.config)
+        try:
+            seed_data.run_with_namespace(seed_args, config=self.config)
+        except Exception:
+            logger.error(
+                "[SEED] Failed during baseline data seeding. "
+                "Review seed_data.py and rerun with --dry-run for diagnostics.",
+                exc_info=True,
+            )
+            raise

        if dry_run:
-            logger.info("Dry run: skipped seed verification")
+            logger.info("[SEED] Dry run: skipped seed verification")
            return

        expected_currencies = {
@@ -892,7 +928,7 @@ class DatabaseSetup:
            raise RuntimeError(message)

        logger.info(
-            "Verified %d seeded currencies present",
+            "[VERIFY] Verified %d seeded currencies present",
            len(found_codes),
        )
@@ -914,7 +950,8 @@ class DatabaseSetup:
            logger.error(message)
            raise RuntimeError(message)
        else:
-            logger.info("Verified default currency 'USD' active")
+            logger.info(
+                "[VERIFY] Verified default currency 'USD' active")

        if expected_unit_codes:
            try:
@@ -1,8 +1,13 @@
from datetime import datetime, timedelta
from typing import Any, Union

-from jose import jwt
+from fastapi import HTTPException, status, Depends
+from fastapi.security import OAuth2PasswordBearer
+from jose import jwt, JWTError
from passlib.context import CryptContext
+from sqlalchemy.orm import Session
+
+from config.database import get_db


ACCESS_TOKEN_EXPIRE_MINUTES = 30
@@ -11,6 +16,8 @@ ALGORITHM = "HS256"

pwd_context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")

+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="users/login")
+

def create_access_token(
    subject: Union[str, Any], expires_delta: Union[timedelta, None] = None
@@ -30,3 +37,23 @@ def verify_password(plain_password: str, hashed_password: str) -> bool:

def get_password_hash(password: str) -> str:
    return pwd_context.hash(password)
+
+
+async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)):
+    from models.user import User
+    credentials_exception = HTTPException(
+        status_code=status.HTTP_401_UNAUTHORIZED,
+        detail="Could not validate credentials",
+        headers={"WWW-Authenticate": "Bearer"},
+    )
+    try:
+        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+        username = payload.get("sub")
+        if username is None:
+            raise credentials_exception
+    except JWTError:
+        raise credentials_exception
+    user = db.query(User).filter(User.username == username).first()
+    if user is None:
+        raise credentials_exception
+    return user
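A round-trip sanity check of these helpers; a sketch assuming `create_access_token` stores the subject under the standard `sub` claim, which is what `get_current_user` reads back, and that `SECRET_KEY` is defined alongside `ALGORITHM` in `services/security.py`:

```python
from jose import jwt

from services.security import ALGORITHM, SECRET_KEY, create_access_token

# Issue a token for a username, then decode it the way get_current_user does.
token = create_access_token("alice")
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
assert payload["sub"] == "alice"
```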
|||||||
@@ -41,6 +41,30 @@ document.addEventListener('DOMContentLoaded', () => {
     });
   });

+  const THEME_API_URL = '/api/settings/theme';
+
+  const normalizeTheme = (theme) => {
+    if (!theme || typeof theme !== 'object') {
+      return {};
+    }
+    const {
+      theme_name,
+      primary_color,
+      secondary_color,
+      accent_color,
+      background_color,
+      text_color,
+    } = theme;
+    return {
+      theme_name,
+      primary_color,
+      secondary_color,
+      accent_color,
+      background_color,
+      text_color,
+    };
+  };
+
   if (themeSettingsForm) {
     themeSettingsForm.addEventListener('submit', async (event) => {
       event.preventDefault();
@@ -49,7 +73,7 @@ document.addEventListener('DOMContentLoaded', () => {
       const themeData = Object.fromEntries(formData.entries());

       try {
-        const response = await fetch('/api/theme-settings', {
+        const response = await fetch(THEME_API_URL, {
          method: 'POST',
           headers: {
             'Content-Type': 'application/json',
@@ -58,9 +82,11 @@ document.addEventListener('DOMContentLoaded', () => {
         });

         if (response.ok) {
+          const payload = await response.json();
+          const savedTheme = normalizeTheme(payload?.theme ?? themeData);
           alert('Theme settings saved successfully!');
-          applyTheme(themeData);
-          saveTheme(themeData);
+          applyTheme(savedTheme);
+          saveTheme(savedTheme);
         } else {
           const errorData = await response.json();
           alert(`Error saving theme settings: ${errorData.detail}`);
@@ -91,9 +117,9 @@ document.addEventListener('DOMContentLoaded', () => {
   // If no saved theme, load from backend (if available)
   async function loadAndApplyThemeFromServer() {
     try {
-      const response = await fetch('/api/theme-settings'); // Assuming a GET endpoint for theme settings
+      const response = await fetch(THEME_API_URL);
       if (response.ok) {
-        const theme = await response.json();
+        const theme = normalizeTheme(await response.json());
         applyTheme(theme);
         saveTheme(theme); // Save to local storage for future use
       } else {
46 tests/unit/test_seed_data.py Normal file
@@ -0,0 +1,46 @@
+import argparse
+from unittest import mock
+
+import scripts.seed_data as seed_data
+from scripts.seed_data import DatabaseConfig
+
+
+def test_run_with_namespace_handles_missing_theme_flag_without_actions() -> None:
+    args = argparse.Namespace(currencies=False, units=False, defaults=False)
+    config = mock.create_autospec(DatabaseConfig)
+    config.application_dsn.return_value = "postgresql://example"
+
+    with (
+        mock.patch("scripts.seed_data._configure_logging") as configure_logging,
+        mock.patch("scripts.seed_data.psycopg2.connect") as connect_mock,
+        mock.patch.object(seed_data.logger, "info") as info_mock,
+    ):
+        seed_data.run_with_namespace(args, config=config)
+
+    configure_logging.assert_called_once()
+    connect_mock.assert_not_called()
+    info_mock.assert_called_with("No seeding options provided; exiting")
+
+
+def test_run_with_namespace_seeds_defaults_without_theme_flag() -> None:
+    args = argparse.Namespace(
+        currencies=False, units=False, defaults=True, dry_run=False)
+    config = mock.create_autospec(DatabaseConfig)
+    config.application_dsn.return_value = "postgresql://example"
+
+    connection_mock = mock.MagicMock()
+    cursor_context = mock.MagicMock()
+    cursor_mock = mock.MagicMock()
+    connection_mock.__enter__.return_value = connection_mock
+    connection_mock.cursor.return_value = cursor_context
+    cursor_context.__enter__.return_value = cursor_mock
+
+    with (
+        mock.patch("scripts.seed_data._configure_logging"),
+        mock.patch("scripts.seed_data.psycopg2.connect", return_value=connection_mock) as connect_mock,
+        mock.patch("scripts.seed_data._seed_defaults") as seed_defaults,
+    ):
+        seed_data.run_with_namespace(args, config=config)
+
+    connect_mock.assert_called_once_with(config.application_dsn())
+    seed_defaults.assert_called_once_with(cursor_mock, dry_run=False)
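A note on the pattern used above: mock.create_autospec(DatabaseConfig) builds a mock that enforces the real class's attribute set and method signatures, so a renamed or misspelled method fails the test instead of passing silently. A standalone illustration of that behavior (the Config class here is hypothetical, not the project's DatabaseConfig):

from unittest import mock


class Config:
    def application_dsn(self) -> str:
        return "postgresql://real"


spec = mock.create_autospec(Config)
spec.application_dsn.return_value = "postgresql://example"

assert spec.application_dsn() == "postgresql://example"
spec.application_dsn.assert_called_once()
# Attributes that do not exist on Config raise AttributeError:
#   spec.missing_method  ->  AttributeError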
@@ -46,6 +46,7 @@ def test_seed_baseline_data_dry_run_skips_verification(
     assert namespace_arg.dry_run is True
     assert namespace_arg.currencies is True
     assert namespace_arg.units is True
+    assert namespace_arg.theme is True
     assert seed_run.call_args.kwargs["config"] is setup_instance.config
     verify_mock.assert_not_called()

@@ -67,6 +68,7 @@ def test_seed_baseline_data_invokes_verification(
     assert isinstance(namespace_arg, argparse.Namespace)
     assert namespace_arg.dry_run is False
     assert seed_run.call_args.kwargs["config"] is setup_instance.config
+    assert namespace_arg.theme is True
     verify_mock.assert_called_once_with(
         expected_currency_codes=expected_currencies,
         expected_unit_codes=expected_units,
@@ -1,15 +1,8 @@
-import pytest
 from sqlalchemy.orm import Session
-from fastapi.testclient import TestClient

-from main import app
-from models.theme_setting import ThemeSetting
 from services.settings import save_theme_settings, get_theme_settings


-client = TestClient(app)
-
-
 def test_save_theme_settings(db_session: Session):
     theme_data = {
         "theme_name": "dark",
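Dropping TestClient(app) means this module now exercises the settings service directly against a database session instead of spinning up the app. The diff does not show the db_session fixture; a minimal sketch of what such a fixture can look like (the in-memory SQLite engine is an assumption for illustration, since the project's real fixture presumably lives in conftest.py and targets PostgreSQL with the real schema):

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker


@pytest.fixture()
def db_session():
    # Illustrative in-memory engine; a real fixture would also create
    # the application schema before yielding the session.
    engine = create_engine("sqlite:///:memory:")
    TestingSession = sessionmaker(bind=engine)
    session = TestingSession()
    try:
        yield session
    finally:
        session.rollback()
        session.close()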