Compare commits: 659b66cc28 ... feat/app-s (16 commits)

Commits in range: c71908c8d9, 75f533b87b, 5b1322ddbc, 713c9feebb, e74ec79cc9, f3ce095b71, 4e1658a638, bff75a722e, d455320eea, 2182f723f7, b3e6546bb9, 5c66bf7899, 9bd5b60d7a, 01a702847d, 1237902d55, dd3f3141e3
@@ -3,6 +3,20 @@ on: [push]
 
 jobs:
   test:
+    services:
+      postgres:
+        image: postgres:16-alpine
+        env:
+          POSTGRES_DB: calminer_ci
+          POSTGRES_USER: calminer
+          POSTGRES_PASSWORD: secret
+        ports:
+          - 5432:5432
+        options: >-
+          --health-cmd "pg_isready -U calminer -d calminer_ci"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 10
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
@@ -11,18 +25,101 @@ jobs:
         uses: actions/setup-python@v5
        with:
           python-version: "3.10"
-      - name: Cache pip
-        uses: https://github.com/actions/cache@v4
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
+      - name: Configure apt proxy
+        run: |
+          set -euo pipefail
+          PROXY_HOST="http://apt-cacher:3142"
+          if ! curl -fsS --connect-timeout 3 "${PROXY_HOST}" >/dev/null; then
+            PROXY_HOST="http://192.168.88.14:3142"
+          fi
+          echo "Using APT proxy ${PROXY_HOST}"
+          echo "http_proxy=${PROXY_HOST}" >> "$GITHUB_ENV"
+          echo "https_proxy=${PROXY_HOST}" >> "$GITHUB_ENV"
+          echo "HTTP_PROXY=${PROXY_HOST}" >> "$GITHUB_ENV"
+          echo "HTTPS_PROXY=${PROXY_HOST}" >> "$GITHUB_ENV"
+          sudo tee /etc/apt/apt.conf.d/01proxy >/dev/null <<EOF
+          Acquire::http::Proxy "${PROXY_HOST}";
+          Acquire::https::Proxy "${PROXY_HOST}";
+          EOF
+      # - name: Cache pip
+      #   uses: actions/cache@v4
+      #   with:
+      #     path: ~/.cache/pip
+      #     key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'requirements-test.txt') }}
+      #     restore-keys: |
+      #       ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
+      #       ${{ runner.os }}-pip-
       - name: Install dependencies
         run: |
           pip install -r requirements.txt
           pip install -r requirements-test.txt
+      - name: Install Playwright browsers
+        run: |
+          python -m playwright install --with-deps
+      - name: Wait for database service
+        env:
+          DATABASE_DRIVER: postgresql
+          DATABASE_HOST: postgres
+          DATABASE_PORT: "5432"
+          DATABASE_NAME: calminer_ci
+          DATABASE_USER: calminer
+          DATABASE_PASSWORD: secret
+          DATABASE_SCHEMA: public
+          DATABASE_SUPERUSER: calminer
+          DATABASE_SUPERUSER_PASSWORD: secret
+          DATABASE_SUPERUSER_DB: calminer_ci
+        run: |
+          python - <<'PY'
+          import os
+          import time
+
+          import psycopg2
+
+          dsn = (
+              f"dbname={os.environ['DATABASE_SUPERUSER_DB']} "
+              f"user={os.environ['DATABASE_SUPERUSER']} "
+              f"password={os.environ['DATABASE_SUPERUSER_PASSWORD']} "
+              f"host={os.environ['DATABASE_HOST']} "
+              f"port={os.environ['DATABASE_PORT']}"
+          )
+
+          for attempt in range(30):
+              try:
+                  with psycopg2.connect(dsn):
+                      break
+              except psycopg2.OperationalError:
+                  time.sleep(2)
+          else:
+              raise SystemExit("Postgres service did not become available")
+          PY
+      - name: Run database setup (dry run)
+        env:
+          DATABASE_DRIVER: postgresql
+          DATABASE_HOST: postgres
+          DATABASE_PORT: "5432"
+          DATABASE_NAME: calminer_ci
+          DATABASE_USER: calminer
+          DATABASE_PASSWORD: secret
+          DATABASE_SCHEMA: public
+          DATABASE_SUPERUSER: calminer
+          DATABASE_SUPERUSER_PASSWORD: secret
+          DATABASE_SUPERUSER_DB: calminer_ci
+        run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
+      - name: Run database setup
+        env:
+          DATABASE_DRIVER: postgresql
+          DATABASE_HOST: postgres
+          DATABASE_PORT: "5432"
+          DATABASE_NAME: calminer_ci
+          DATABASE_USER: calminer
+          DATABASE_PASSWORD: secret
+          DATABASE_SCHEMA: public
+          DATABASE_SUPERUSER: calminer
+          DATABASE_SUPERUSER_PASSWORD: secret
+          DATABASE_SUPERUSER_DB: calminer_ci
+        run: python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
       - name: Run tests
         env:
-          DATABASE_URL: sqlite:///./test_calminer.db
+          DATABASE_URL: postgresql+psycopg2://calminer:secret@postgres:5432/calminer_ci
+          DATABASE_SCHEMA: public
         run: pytest
.gitignore (vendored)
@@ -16,6 +16,9 @@ env/
 
 # environment variables
 .env
+*.env
+# except example files
+!config/*.env.example
 
 # github instruction files
 .github/instructions/
@@ -21,13 +21,18 @@ A range of features are implemented to support these functionalities.
 - **Unified UI Shell**: Server-rendered templates extend a shared base layout with a persistent left sidebar linking scenarios, parameters, costs, consumption, production, equipment, maintenance, simulations, and reporting views.
 - **Operations Overview Dashboard**: The root route (`/`) surfaces cross-scenario KPIs, charts, and maintenance reminders with a one-click refresh backed by aggregated loaders.
 - **Theming Tokens**: Shared CSS variables in `static/css/main.css` centralize the UI color palette for consistent styling and rapid theme tweaks.
-- **Modular Frontend Scripts**: Page-specific interactions now live in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
+- **Settings Center**: The Settings landing page exposes visual theme controls and links to currency administration, backed by persisted application settings and environment overrides.
+- **Modular Frontend Scripts**: Page-specific interactions in `static/js/` modules, keeping templates lean while enabling browser caching and reuse.
 - **Monte Carlo Simulation (in progress)**: Services and routes are scaffolded for future stochastic analysis.
 
 ## Documentation & quickstart
 
 This repository contains detailed developer and architecture documentation in the `docs/` folder.
 
+### Settings overview
+
+The Settings page (`/ui/settings`) lets administrators adjust global theme colors stored in the `application_setting` table. Changes are instantly applied across the UI. Environment variables prefixed with `CALMINER_THEME_` (for example, `CALMINER_THEME_COLOR_PRIMARY`) automatically override individual CSS variables and render as read-only in the form, ensuring deployment-time overrides take precedence while remaining visible to operators.
+
 [Quickstart](docs/quickstart.md) contains developer quickstart, migrations, testing and current status.
 
 Key architecture documents: see [architecture](docs/architecture/README.md) for the arc42-based architecture documentation.
config/setup_staging.env.example (new file)
@@ -0,0 +1,11 @@
+# Sample environment configuration for staging deployment
+DATABASE_HOST=staging-db.internal
+DATABASE_PORT=5432
+DATABASE_NAME=calminer_staging
+DATABASE_USER=calminer_app
+DATABASE_PASSWORD=<app-password>
+
+# Admin connection used for provisioning database and roles
+DATABASE_SUPERUSER=postgres
+DATABASE_SUPERUSER_PASSWORD=<admin-password>
+DATABASE_SUPERUSER_DB=postgres
config/setup_test.env.example (new file)
@@ -0,0 +1,14 @@
+# Sample environment configuration for running scripts/setup_database.py against a test instance
+DATABASE_DRIVER=postgresql
+DATABASE_HOST=postgres
+DATABASE_PORT=5432
+DATABASE_NAME=calminer_test
+DATABASE_USER=calminer_test
+DATABASE_PASSWORD=<test-password>
+# optional: specify schema if different from 'public'
+#DATABASE_SCHEMA=public
+
+# Admin connection used for provisioning database and roles
+DATABASE_SUPERUSER=postgres
+DATABASE_SUPERUSER_PASSWORD=<superuser-password>
+DATABASE_SUPERUSER_DB=postgres
docker-compose.postgres.yml (new file)
@@ -0,0 +1,23 @@
+version: "3.9"
+
+services:
+  postgres:
+    image: postgres:16-alpine
+    container_name: calminer_postgres_local
+    restart: unless-stopped
+    environment:
+      POSTGRES_DB: calminer_local
+      POSTGRES_USER: calminer
+      POSTGRES_PASSWORD: secret
+    ports:
+      - "5433:5432"
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U calminer -d calminer_local"]
+      interval: 10s
+      timeout: 5s
+      retries: 10
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+volumes:
+  postgres_data:
@@ -10,22 +10,58 @@ status: skeleton
 
 > e.g., choice of FastAPI, PostgreSQL, SQLAlchemy, Chart.js, Jinja2 templates.
 
+The architecture of CalMiner is influenced by several technical constraints that shape its design and implementation:
+
+1. **Framework Selection**: The choice of FastAPI as the web framework imposes constraints on how the application handles requests, routing, and middleware. FastAPI's asynchronous capabilities must be leveraged appropriately to ensure optimal performance.
+2. **Database Technology**: The use of PostgreSQL as the primary database system dictates the data modeling, querying capabilities, and transaction management strategies. SQLAlchemy ORM is used for database interactions, which requires adherence to its conventions and limitations.
+3. **Frontend Technologies**: The decision to use Jinja2 for server-side templating and Chart.js for data visualization influences the structure of the frontend code and the way dynamic content is rendered.
+4. **Simulation Logic**: The Monte Carlo simulation logic must be designed to efficiently handle large datasets and perform computations within the constraints of the chosen programming language (Python) and its libraries.
+
 ## Organizational Constraints
 
 > e.g., team skillsets, development workflows, CI/CD pipelines.
 
+Restrictions arising from organizational factors include:
+
+1. **Team Expertise**: The development team's familiarity with FastAPI, SQLAlchemy, and frontend technologies like Jinja2 and Chart.js influences the architecture choices to ensure maintainability and ease of development.
+2. **Development Processes**: The adoption of Agile methodologies and CI/CD pipelines (using Gitea Actions) shapes the architecture to support continuous integration, automated testing, and deployment practices.
+3. **Collaboration Tools**: The use of specific collaboration and version control tools (e.g., Gitea) affects how code is managed, reviewed, and integrated, impacting the overall architecture and development workflow.
+4. **Documentation Standards**: The requirement for comprehensive documentation (as seen in the `docs/` folder) necessitates an architecture that is well-structured and easy to understand for both current and future team members.
+5. **Knowledge Sharing**: The need for effective knowledge sharing and onboarding processes influences the architecture to ensure that it is accessible and understandable for new team members.
+6. **Resource Availability**: The availability of hardware, software, and human resources within the organization can impose constraints on the architecture, affecting decisions related to scalability, performance, and feature implementation.
+
 ## Regulatory Constraints
 
 > e.g., data privacy laws, industry standards.
 
+Regulatory constraints that impact the architecture of CalMiner include:
+
+1. **Data Privacy Compliance**: The architecture must ensure compliance with data privacy regulations such as GDPR or CCPA, which may dictate how user data is collected, stored, and processed.
+2. **Industry Standards**: Adherence to industry-specific standards and best practices may influence the design of data models, security measures, and reporting functionalities.
+3. **Auditability**: The system may need to incorporate logging and auditing features to meet regulatory requirements, affecting the architecture of data storage and access controls.
+4. **Data Retention Policies**: Regulatory requirements regarding data retention and deletion may impose constraints on how long certain types of data can be stored, influencing database design and data lifecycle management.
+5. **Security Standards**: Compliance with security standards (e.g., ISO/IEC 27001) may necessitate the implementation of specific security measures, such as encryption, access controls, and vulnerability management, which impact the overall architecture.
+
 ## Environmental Constraints
 
 > e.g., deployment environments, cloud provider limitations.
 
+Environmental constraints affecting the architecture include:
+
+1. **Deployment Environments**: The architecture must accommodate various deployment environments (development, testing, production) with differing configurations and resource allocations.
+2. **Cloud Provider Limitations**: If deployed on a specific cloud provider, the architecture may need to align with the provider's services, limitations, and best practices, such as using managed databases or specific container orchestration tools.
+3. **Containerization**: The use of Docker for containerization imposes constraints on how the application is packaged, deployed, and scaled, influencing the architecture to ensure compatibility with container orchestration platforms.
+4. **Scalability Requirements**: The architecture must be designed to scale efficiently based on anticipated load and usage patterns, considering the limitations of the chosen infrastructure.
+
 ## Performance Constraints
 
 > e.g., response time requirements, scalability needs.
 
+Current performance constraints include:
+
+1. **Response Time Requirements**: The architecture must ensure that the system can respond to user requests within a specified time frame, which may impact design decisions related to caching, database queries, and API performance.
+2. **Scalability Needs**: The system should be able to handle increased load and user traffic without significant degradation in performance, necessitating a scalable architecture that can grow with demand.
+
 ## Security Constraints
 
 > e.g., authentication mechanisms, data encryption standards.
@@ -36,3 +36,22 @@ The architecture encompasses the following key areas:
 10. **Integration Points**: Interfaces for integrating with external systems and services.
 11. **Monitoring and Logging**: Systems for tracking system performance and user activity.
 12. **Maintenance and Support**: Processes for ongoing system maintenance and user support.
+
+## Diagram
+
+```mermaid
+sequenceDiagram
+    participant PM as Project Manager
+    participant DA as Data Analyst
+    participant EX as Executive
+    participant CM as CalMiner System
+
+    PM->>CM: Create and manage scenarios
+    DA->>CM: Analyze simulation results
+    EX->>CM: Review reports and dashboards
+    CM->>PM: Provide scenario planning tools
+    CM->>DA: Deliver analysis insights
+    CM->>EX: Generate high-level reports
+```
+
+This diagram illustrates the key components of the CalMiner system and their interactions with external actors.
@@ -4,6 +4,7 @@ description: "Explain the static structure: modules, components, services and th
 status: draft
 ---
 
+<!-- markdownlint-disable-next-line MD025 -->
 # 05 — Building Block View
 
 ## Architecture overview
@@ -25,6 +26,7 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
 - leveraging a shared dependency module (`routes/dependencies.get_db`) for SQLAlchemy session management.
 - **Models** (`models/`): SQLAlchemy ORM models representing database tables and relationships, encapsulating domain entities like Scenario, CapEx, OpEx, Consumption, ProductionOutput, Equipment, Maintenance, and SimulationResult.
 - **Services** (`services/`): business logic layer that processes data, performs calculations, and interacts with models. Key services include reporting calculations and Monte Carlo simulation scaffolding.
+- `services/settings.py`: manages application settings backed by the `application_setting` table, including CSS variable defaults, persistence, and environment-driven overrides that surface in both the API and UI.
 - **Database** (`config/database.py`): sets up the SQLAlchemy engine and session management for PostgreSQL interactions.
 
 ### Frontend
@@ -32,6 +34,8 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
 - **Templates** (`templates/`): Jinja2 templates for server-rendered HTML views, extending a shared base layout with a persistent sidebar for navigation.
 - **Static Assets** (`static/`): CSS and JavaScript files for styling and interactivity. Shared CSS variables in `static/css/main.css` define the color palette, while page-specific JS modules in `static/js/` handle dynamic behaviors.
 - **Reusable partials** (`templates/partials/components.html`): macro library that standardises select inputs, feedback/empty states, and table wrappers so pages remain consistent while keeping DOM hooks stable for existing JavaScript modules.
+- `templates/settings.html`: Settings hub that renders theme controls and environment override tables using metadata provided by `routes/ui.py`.
+- `static/js/settings.js`: applies client-side validation, form submission, and live CSS updates for theme changes, respecting environment-managed variables returned by the API.
 
 ### Middleware & Utilities
 
@@ -45,6 +49,7 @@ Refer to the detailed architecture chapters in `docs/architecture/`:
 - `consumption.py`, `production_output.py`: operational data tables.
 - `equipment.py`, `maintenance.py`: asset management models.
 - `simulation_result.py`: stores Monte Carlo iteration outputs.
+- `application_setting.py`: persists editable application configuration, currently focused on theme variables but designed to store future settings categories.
 
 ## Service Layer
@@ -15,7 +15,12 @@ The CalMiner application is deployed using a multi-tier architecture consisting
 1. **Client Layer**: This layer consists of web browsers that interact with the application through a user interface rendered by Jinja2 templates and enhanced with JavaScript (Chart.js for dashboards).
 2. **Web Application Layer**: This layer hosts the FastAPI application, which handles API requests, business logic, and serves HTML templates. It communicates with the database layer for data persistence.
 3. **Database Layer**: This layer consists of a PostgreSQL database that stores all application data, including scenarios, parameters, costs, consumption, production outputs, equipment, maintenance logs, and simulation results.
-4. **Caching Layer**: This layer uses Redis to cache frequently accessed data and improve application performance.
+
+```mermaid
+graph TD
+    A[Client Layer<br/>(Web Browsers)] --> B[Web Application Layer<br/>(FastAPI)]
+    B --> C[Database Layer<br/>(PostgreSQL)]
+```
 
 ## Infrastructure Components
 
@@ -29,6 +34,16 @@ The infrastructure components for the application include:
 - **CI/CD Pipeline**: Automated pipelines (Gitea Actions) run tests, build/push Docker images, and trigger deployments.
 - **Cloud Infrastructure (optional)**: The application can be deployed on cloud platforms.
+
+```mermaid
+graph TD
+    A[Web Server] --> B[Database Server]
+    A --> C[Static File Server]
+    A --> D[Reverse Proxy]
+    A --> E[Containerization]
+    A --> F[CI/CD Pipeline]
+    A --> G[Cloud Infrastructure]
+```
 
 ## Environments
 
 The application can be deployed in multiple environments to support development, testing, and production:
@@ -37,7 +52,7 @@ The application can be deployed in multiple environments to support development,
 
 The development environment is set up for local development and testing. It includes:
 
-- Local PostgreSQL instance
+- Local PostgreSQL instance (docker compose recommended, script available at `docker-compose.postgres.yml`)
 - FastAPI server running in debug mode
 
 ### Testing Environment
@@ -55,6 +55,7 @@ See [Domain Models](08_concepts/08_01_domain_models.md) document for detailed cl
 - `production_output`: production metrics per scenario.
 - `equipment` and `maintenance`: equipment inventory and maintenance events with dates/costs.
 - `simulation_result`: staging table for future Monte Carlo outputs (not yet populated by `run_simulation`).
+- `application_setting`: centralized key/value store for UI and system configuration, supporting typed values, categories, and editability flags so administrators can manage theme variables and future global options without code changes.
 
 Foreign keys secure referential integrity between domain tables and their scenarios, enabling per-scenario analytics.
@@ -21,7 +21,7 @@ CalMiner uses a combination of unit, integration, and end-to-end tests to ensure
 ### CI/CD
 
 - Use Gitea Actions for CI/CD; workflows live under `.gitea/workflows/`.
-- `test.yml` runs on every push with cached Python dependencies via `actions/cache@v3`.
+- `test.yml` runs on every push, provisions a temporary Postgres 16 service, waits for readiness, executes the setup script in dry-run and live modes, installs Playwright browsers, and finally runs the full pytest suite.
 - `build-and-push.yml` builds the Docker image with `docker/build-push-action@v2`, reusing GitHub Actions cache-backed layers, and pushes to the Gitea registry.
 - `deploy.yml` connects to the target host (via `appleboy/ssh-action`) to pull the freshly pushed image and restart the container.
 - Mandatory secrets: `REGISTRY_USERNAME`, `REGISTRY_PASSWORD`, `REGISTRY_URL`, `SSH_HOST`, `SSH_USERNAME`, `SSH_PRIVATE_KEY`.
@@ -99,10 +99,11 @@ pytest tests/e2e/ --headed
 `test.yml` encapsulates the steps below:
 
 - Check out the repository and set up Python 3.10.
-- Restore the pip cache (keyed by `requirements.txt`).
-- Install project dependencies and Playwright browsers (if needed for E2E).
+- Configure the runner's apt proxy (if available), install project dependencies (requirements + test extras), and download Playwright browsers.
 - Run `pytest` (extend with `--cov` flags when enforcing coverage).
 
+> The pip cache step is temporarily disabled in `test.yml` until the self-hosted cache service is exposed (see `docs/ci-cache-troubleshooting.md`).
+
 `build-and-push.yml` adds:
 
 - Registry login using repository secrets.
docs/ci-cache-troubleshooting.md (new file)
@@ -0,0 +1,27 @@
+# CI Cache Troubleshooting
+
+## Background
+
+The test workflow (`.gitea/workflows/test.yml`) uses the `actions/cache` action to reuse the pip download cache located at `~/.cache/pip`. The cache key now hashes both `requirements.txt` and `requirements-test.txt` so the cache stays aligned with dependency changes.
+
+## Current Observation
+
+Recent CI runs report the following warning when the cache step executes:
+
+```text
+::warning::Failed to restore: getCacheEntry failed: connect ETIMEDOUT 172.17.0.5:40181
+Cache not found for input keys: Linux-pip-<hash>, Linux-pip-
+```
+
+The timeout indicates the runner cannot reach the cache backend rather than a normal cache miss.
+
+## Recommended Follow-Up
+
+- Confirm that the Actions cache service is enabled for the CI environment (Gitea runners require the cache server URL to be provided via `ACTIONS_CACHE_URL` and `ACTIONS_RUNTIME_URL`).
+- Verify network connectivity from the runner to the cache service endpoint and ensure required ports are open.
+- After connectivity is restored, rerun the workflow to allow the cache to be populated and confirm subsequent runs restore the cache without warnings.
+
+## Interim Guidance
+
+- The workflow will proceed without cached dependencies, but package installs may take longer.
+- Keep the cache step in place so it begins working automatically once the infrastructure is configured.
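Editor's illustration (not part of the changeset): one way to act on the "verify network connectivity" follow-up above is a small probe run on the runner host. The sketch below assumes the cache URL is exported as `ACTIONS_CACHE_URL` (the variable named in the note) and uses only the Python standard library; it only distinguishes a reachable endpoint from a timeout, which is the symptom described.

```python
# Illustrative connectivity probe for the Actions cache endpoint (assumes
# ACTIONS_CACHE_URL is set in the runner's environment, as described above).
import os
import socket
import sys
from urllib.parse import urlparse

cache_url = os.environ.get("ACTIONS_CACHE_URL")
if not cache_url:
    sys.exit("ACTIONS_CACHE_URL is not set on this runner")

parsed = urlparse(cache_url)
host = parsed.hostname
port = parsed.port or (443 if parsed.scheme == "https" else 80)

try:
    # A plain TCP connect is enough to tell ETIMEDOUT apart from a cache miss.
    with socket.create_connection((host, port), timeout=5):
        print(f"Cache endpoint {host}:{port} is reachable")
except OSError as exc:
    sys.exit(f"Cannot reach cache endpoint {host}:{port}: {exc}")
```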
docs/idempotency_audit.md (new file)
@@ -0,0 +1,31 @@
+# Setup Script Idempotency Audit (2025-10-25)
+
+This note captures the current evaluation of idempotent behaviour for `scripts/setup_database.py` and outlines follow-up actions.
+
+## Admin Tasks
+
+- **ensure_database**: guarded by `SELECT 1 FROM pg_database`; re-runs safely. Failure mode: network issues or lack of privileges surface as psycopg2 errors without additional context.
+- **ensure_role**: checks `pg_roles`, creates role if missing, reapplies grants each time. Subsequent runs execute grants again but PostgreSQL tolerates repeated grants.
+- **ensure_schema**: uses `information_schema` guard and respects `--dry-run`; idempotent when schema is `public` or already present.
+
+## Application Tasks
+
+- **initialize_schema**: relies on SQLAlchemy `create_all(checkfirst=True)`; repeatable. Dry-run output remains descriptive.
+- **run_migrations**: new baseline workflow applies `000_base.sql` once and records legacy scripts as applied. Subsequent runs detect the baseline in `schema_migrations` and skip reapplication.
+
+## Seeding
+
+- `seed_baseline_data` seeds currencies and measurement units with upsert logic. Verification now raises on missing data, preventing silent failures.
+- Running `--seed-data` repeatedly performs `ON CONFLICT` updates, making the operation safe.
+
+## Outstanding Risks
+
+1. Baseline migration relies on legacy files being present when first executed; if removed beforehand, old entries are never marked. (Low risk given repository state.)
+2. `ensure_database` and `ensure_role` do not wrap SQL execution errors with additional context beyond psycopg2 messages.
+3. Baseline verification assumes migrations and seeding run in the same process; manual runs of `scripts/seed_data.py` without the baseline could still fail.
+
+## Recommended Actions
+
+- Add regression tests ensuring repeated executions of key CLI paths (`--run-migrations`, `--seed-data`) result in no-op behaviour after the first run.
+- Extend logging/error handling for admin operations to provide clearer messages on repeated failures.
+- Consider a preflight check when migrations directory lacks legacy files but baseline is pending, warning about potential drift.
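Editor's illustration of the "no-op on rerun" behaviour the recommended regression tests would assert. This is a sketch, not the repository's test suite or `scripts/setup_database.py`: the table and column names follow the seed plan in this changeset, the connection is built from the granular `DATABASE_*` variables used throughout, and the upsert mirrors the `ON CONFLICT (code) DO UPDATE` logic described above.

```python
# Sketch of a rerun-safety check for the currency seed (hypothetical test).
# Assumes a reachable Postgres with the `currency` table from the baseline.
import os

import psycopg2

SEED_SQL = """
    INSERT INTO currency (code, name, symbol, is_active)
    VALUES (%s, %s, %s, TRUE)
    ON CONFLICT (code) DO UPDATE
    SET name = EXCLUDED.name, symbol = EXCLUDED.symbol
"""

ROWS = [("USD", "US Dollar", "USD$"), ("CAD", "Canadian Dollar", "CAD$")]


def seed(conn) -> None:
    with conn.cursor() as cur:
        cur.executemany(SEED_SQL, ROWS)
    conn.commit()


def count_currencies(conn) -> int:
    with conn.cursor() as cur:
        cur.execute("SELECT COUNT(*) FROM currency")
        return cur.fetchone()[0]


if __name__ == "__main__":
    conn = psycopg2.connect(
        dbname=os.environ["DATABASE_NAME"],
        user=os.environ["DATABASE_USER"],
        password=os.environ["DATABASE_PASSWORD"],
        host=os.environ["DATABASE_HOST"],
        port=os.environ["DATABASE_PORT"],
    )
    seed(conn)
    before = count_currencies(conn)
    seed(conn)  # second run must be a no-op apart from refreshed names/symbols
    assert count_currencies(conn) == before, "re-seeding inserted duplicate rows"
    conn.close()
    print("seed rerun left the row count unchanged")
```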
docs/logging_audit.md (new file)
@@ -0,0 +1,29 @@
+# Setup Script Logging Audit (2025-10-25)
+
+The following observations capture current logging behaviour in `scripts/setup_database.py` and highlight areas requiring improved error handling and messaging.
+
+## Connection Validation
+
+- `validate_admin_connection` and `validate_application_connection` log entry/exit messages and raise `RuntimeError` with context if connection fails. This coverage is sufficient.
+- `ensure_database` logs creation states but does not surface connection or SQL exceptions beyond the initial connection acquisition. When the inner `cursor.execute` calls fail, the exceptions bubble without contextual logging.
+
+## Migration Runner
+
+- Lists pending migrations and logs each application attempt.
+- When the baseline is pending, the script logs whether it is a dry-run or live application and records legacy file marking. However, if `_apply_migration_file` raises an exception, the caller re-raises after logging the failure; there is no wrapping message guiding users toward manual cleanup.
+- Legacy migration marking happens silently (just info logs). Failures during the insert into `schema_migrations` would currently propagate without added guidance.
+
+## Seeding Workflow
+
+- `seed_baseline_data` announces each seeding phase and skips verification in dry-run mode with a log breadcrumb.
+- `_verify_seeded_data` warns about missing currencies/units and inactive defaults but does **not** raise errors, meaning CI can pass while the database is incomplete. There is no explicit log when verification succeeds.
+- `_seed_units` logs when the `measurement_unit` table is missing, which is helpful, but the warning is the only feedback; no exception is raised.
+
+## Suggested Enhancements
+
+1. Wrap baseline application and legacy marking in `try/except` blocks that log actionable remediation steps before re-raising.
+2. Promote seed verification failures (missing or inactive records) to exceptions so automated workflows fail fast; add success logs for clarity.
+3. Add contextual logging around currency/measurement-unit insert failures, particularly around `execute_values` calls, to aid debugging malformed data.
+4. Introduce structured logging (log codes or phases) for major steps (`CONNECT`, `MIGRATE`, `SEED`, `VERIFY`) to make scanning log files easier.
+
+These findings inform the remaining TODO subtasks for enhanced error handling.
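Editor's sketch of what suggestions 1 and 4 above could look like in practice. This is not the current `scripts/setup_database.py` code; the `_apply_migration_file` name and the `MIGRATE` phase tag come from the audit above, everything else is illustrative.

```python
# Illustrative phase-tagged logging with an actionable wrapping message,
# per suggestions 1 and 4 above. Not the repository's implementation.
import logging
from pathlib import Path

logger = logging.getLogger("setup_database")


def apply_baseline(conn, baseline: Path, apply_file) -> None:
    """Apply the baseline migration, logging a remediation hint on failure.

    `apply_file` stands in for the script's own _apply_migration_file helper.
    """
    logger.info("[MIGRATE] applying baseline %s", baseline.name)
    try:
        apply_file(conn, baseline)
    except Exception:
        logger.error(
            "[MIGRATE] baseline %s failed; fix the underlying SQL error, check "
            "schema_migrations for partial marks, then rerun with --run-migrations.",
            baseline.name,
        )
        raise
    logger.info("[MIGRATE] baseline %s applied", baseline.name)
```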
docs/migrations/consolidated_baseline_plan.md (new file)
@@ -0,0 +1,53 @@
+# Consolidated Migration Baseline Plan
+
+This note outlines the content and structure of the planned baseline migration (`scripts/migrations/000_base.sql`). The objective is to capture the currently required schema changes in a single idempotent script so that fresh environments only need to apply one SQL file before proceeding with incremental migrations.
+
+## Guiding Principles
+
+1. **Idempotent DDL**: Every `CREATE` or `ALTER` statement must tolerate repeated execution. Use `IF NOT EXISTS` guards or existence checks (`information_schema`) where necessary.
+2. **Order of Operations**: Create reference tables first, then update dependent tables, finally enforce foreign keys and constraints.
+3. **Data Safety**: Default data seeded by migrations should be minimal and in ASCII-only form to avoid encoding issues in various shells and CI logs.
+4. **Compatibility**: The baseline must reflect the schema shape expected by the current SQLAlchemy models, API routes, and seeding scripts.
+
+## Schema Elements to Include
+
+### 1. `currency` Table
+
+- Columns: `id SERIAL PRIMARY KEY`, `code VARCHAR(3) UNIQUE NOT NULL`, `name VARCHAR(128) NOT NULL`, `symbol VARCHAR(8)`, `is_active BOOLEAN NOT NULL DEFAULT TRUE`.
+- Index: implicit via unique constraint on `code`.
+- Seed rows matching `scripts.seed_data.CURRENCY_SEEDS` (ASCII-only symbols such as `USD$`, `CAD$`).
+- Upsert logic using `ON CONFLICT (code) DO UPDATE` to keep names/symbols in sync when rerun.
+
+### 2. Currency Integration for CAPEX/OPEX
+
+- Add `currency_id INTEGER` columns with `IF NOT EXISTS` guards.
+- Populate `currency_id` from legacy `currency_code` if the column exists.
+- Default null `currency_id` values to the USD row, then `ALTER` to `SET NOT NULL`.
+- Create `fk_capex_currency` and `fk_opex_currency` constraints with `ON DELETE RESTRICT` semantics.
+- Drop legacy `currency_code` column if it exists (safe because new column holds data).
+
+### 3. Measurement Metadata on Consumption/Production
+
+- Ensure `consumption` and `production_output` tables have `unit_name VARCHAR(64)` and `unit_symbol VARCHAR(16)` columns with `IF NOT EXISTS` guards.
+
+### 4. `measurement_unit` Reference Table
+
+- Columns: `id SERIAL PRIMARY KEY`, `code VARCHAR(64) UNIQUE NOT NULL`, `name VARCHAR(128) NOT NULL`, `symbol VARCHAR(16)`, `unit_type VARCHAR(32) NOT NULL`, `is_active BOOLEAN NOT NULL DEFAULT TRUE`, `created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()`, `updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()`.
+- A trigger to maintain `updated_at` is deferred for now; automate it via the application layer later and omit the trigger from the baseline.
+- Seed rows matching `MEASUREMENT_UNIT_SEEDS` (ASCII names/symbols). Use `ON CONFLICT (code) DO UPDATE` to keep descriptive fields aligned.
+
+### 5. Transaction Handling
+
+- Wrap the main operations in a single `BEGIN; ... COMMIT;` block.
+- Use subtransactions (`DO $$ ... $$;`) only where conditional logic is required (e.g., checking column existence before backfill).
+
+## Migration Tracking Alignment
+
+- The baseline file will be named `000_base.sql`. After execution, insert a row into `schema_migrations` with filename `000_base.sql` to keep the tracking table aligned.
+- Existing migrations (`20251021_add_currency_and_unit_fields.sql`, `20251022_create_currency_table_and_fks.sql`) remain for historical reference but will no longer be applied to new environments once the baseline is present.
+
+## Next Steps
+
+1. Draft `000_base.sql` reflecting the steps above.
+2. Update `run_migrations` to recognise the baseline file and mark older migrations as applied when the baseline exists.
+3. Provide documentation in `docs/quickstart.md` explaining how to reset an environment using the baseline plus seeds.
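Editor's sketch of how Next Step 2 ("recognise the baseline file and mark older migrations as applied") might be structured. This is a planning illustration only, not the actual `run_migrations` implementation; the `schema_migrations` table and the file names are the ones named in this plan, and the split into "execute" versus "mark applied" follows the tracking-alignment rules above.

```python
# Planning sketch for baseline-aware migration selection (illustrative only).
from pathlib import Path

BASELINE = "000_base.sql"
LEGACY = [
    "20251021_add_currency_and_unit_fields.sql",
    "20251022_create_currency_table_and_fks.sql",
]


def plan_migrations(migrations_dir: Path, applied: set[str]) -> tuple[list[Path], list[Path]]:
    """Split pending files into (to_execute, to_mark_applied).

    `applied` holds filenames already recorded in schema_migrations. Legacy
    scripts superseded by the baseline are recorded without being executed.
    """
    all_files = sorted(migrations_dir.glob("*.sql"))
    has_baseline = any(p.name == BASELINE for p in all_files)
    to_execute: list[Path] = []
    to_mark: list[Path] = []
    for path in all_files:
        if path.name in applied:
            continue  # already tracked in schema_migrations
        if has_baseline and path.name in LEGACY:
            to_mark.append(path)  # superseded by 000_base.sql
        else:
            to_execute.append(path)
    return to_execute, to_mark
```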
@@ -52,6 +52,15 @@ If you maintain a Postgres or Redis dependency locally, consider authoring a `do
 - **API base URL**: `http://localhost:8000/api`
 - Key routes include creating scenarios, parameters, costs, consumption, production, equipment, maintenance, and reporting summaries. See the `routes/` directory for full details.
 
+### Theme configuration
+
+- Open `/ui/settings` to access the Settings dashboard. The **Theme Colors** form lists every CSS variable persisted in the `application_setting` table. Updates apply immediately across the UI once saved.
+- Use the accompanying API endpoints for automation or integration tests:
+  - `GET /api/settings/css` returns the active variables, defaults, and metadata describing any environment overrides.
+  - `PUT /api/settings/css` accepts a payload such as `{"variables": {"--color-primary": "#112233"}}` and persists the change unless an environment override is in place.
+- Environment variables prefixed with `CALMINER_THEME_` win over database values. For example, setting `CALMINER_THEME_COLOR_PRIMARY="#112233"` renders the corresponding input read-only and surfaces the override in the Environment Overrides table.
+- Acceptable values include hex (`#rrggbb` or `#rrggbbaa`), `rgb()/rgba()`, and `hsl()/hsla()` expressions with the expected number of components. Invalid inputs trigger a validation error and the API responds with HTTP 422.
+
 ## Dashboard Preview
 
 1. Start the FastAPI server and navigate to `/`.
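Editor's illustration of the settings endpoints documented in the hunk above. The sketch is not project code: it assumes the third-party `requests` package and a server on `http://localhost:8000`; the endpoint paths, payload shape, and HTTP 422 behaviour are the ones stated in the added quickstart bullets.

```python
# Illustrative client for the documented settings endpoints (not project code).
# Assumes `pip install requests` and the app running on http://localhost:8000.
import requests

BASE = "http://localhost:8000"

# Read the active CSS variables plus metadata about environment overrides.
current = requests.get(f"{BASE}/api/settings/css", timeout=10)
current.raise_for_status()
print(current.json())

# Persist a new primary color, unless CALMINER_THEME_COLOR_PRIMARY overrides it.
update = requests.put(
    f"{BASE}/api/settings/css",
    json={"variables": {"--color-primary": "#112233"}},
    timeout=10,
)
if update.status_code == 422:
    # Invalid color expressions are rejected with a validation error.
    print("validation error:", update.json())
else:
    update.raise_for_status()
    print("theme updated")
```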
@@ -68,13 +77,11 @@ pytest
|
|||||||
|
|
||||||
E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.
|
E2E tests use Playwright and a session-scoped `live_server` fixture that starts the app at `http://localhost:8001` for browser-driven tests.
|
||||||
|
|
||||||
## Migrations & Currency Backfill
|
## Migrations & Baseline
|
||||||
|
|
||||||
The project includes a referential `currency` table and migration/backfill tooling to normalize legacy currency fields.
|
A consolidated baseline migration (`scripts/migrations/000_base.sql`) captures all schema changes required for a fresh installation. The script is idempotent: it creates the `currency` and `measurement_unit` reference tables, provisions the `application_setting` store for configurable UI/system options, ensures consumption and production records expose unit metadata, and enforces the foreign keys used by CAPEX and OPEX.
|
||||||
|
|
||||||
### Run migrations and backfill (development)
|
Configure granular database settings in your PowerShell session before running migrations:
|
||||||
|
|
||||||
Configure the granular database settings in your PowerShell session before running migrations.
|
|
||||||
|
|
||||||
```powershell
|
```powershell
|
||||||
$env:DATABASE_DRIVER = 'postgresql'
|
$env:DATABASE_DRIVER = 'postgresql'
|
||||||
@@ -84,14 +91,144 @@ $env:DATABASE_USER = 'calminer'
|
|||||||
$env:DATABASE_PASSWORD = 's3cret'
|
$env:DATABASE_PASSWORD = 's3cret'
|
||||||
$env:DATABASE_NAME = 'calminer'
|
$env:DATABASE_NAME = 'calminer'
|
||||||
$env:DATABASE_SCHEMA = 'public'
|
$env:DATABASE_SCHEMA = 'public'
|
||||||
python scripts/run_migrations.py
|
python scripts/setup_database.py --run-migrations --seed-data --dry-run
|
||||||
python scripts/backfill_currency.py --dry-run
|
python scripts/setup_database.py --run-migrations --seed-data
|
||||||
python scripts/backfill_currency.py --create-missing
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The dry-run invocation reports which steps would execute without making changes. The live run applies the baseline (if not already recorded in `schema_migrations`) and seeds the reference data relied upon by the UI and API.
|
||||||
|
|
||||||
|
> ℹ️ When `--seed-data` is supplied without `--run-migrations`, the bootstrap script automatically applies any pending SQL migrations first so the `application_setting` table (and future settings-backed features) are present before seeding.
|
||||||
|
|
||||||
> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
|
> ℹ️ The application still accepts `DATABASE_URL` as a fallback if the granular variables are not set.
|
||||||
|
|
||||||
Use `--dry-run` first to verify what will change.
|
## Database bootstrap workflow
|
||||||
|
|
||||||
|
Provision or refresh a database instance with `scripts/setup_database.py`. Populate the required environment variables (an example lives at `config/setup_test.env.example`) and run:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Load test credentials (PowerShell)
|
||||||
|
Get-Content .\config\setup_test.env.example |
|
||||||
|
ForEach-Object {
|
||||||
|
if ($_ -and -not $_.StartsWith('#')) {
|
||||||
|
$name, $value = $_ -split '=', 2
|
||||||
|
Set-Item -Path Env:$name -Value $value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Dry-run to inspect the planned actions
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
|
||||||
|
|
||||||
|
# Execute the full workflow
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
|
||||||
|
```
|
||||||
|
|
||||||
|
Typical log output confirms:
|
||||||
|
|
||||||
|
- Admin and application connections succeed for the supplied credentials.
|
||||||
|
- Database and role creation are idempotent (`already present` when rerun).
|
||||||
|
- SQLAlchemy metadata either reports missing tables or `All tables already exist`.
|
||||||
|
- Migrations list pending files and finish with `Applied N migrations` (a new database reports `Applied 1 migrations` for `000_base.sql`).
|
||||||
|
|
||||||
|
After a successful run the target database contains all application tables plus `schema_migrations`, and that table records each applied migration file. New installations only record `000_base.sql`; upgraded environments retain historical entries alongside the baseline.
|
||||||
|
|
||||||
|
### Local Postgres via Docker Compose
|
||||||
|
|
||||||
|
For local validation without installing Postgres directly, use the provided compose file:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
docker compose -f docker-compose.postgres.yml up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Summary
|
||||||
|
|
||||||
|
1. Start the Postgres container with `docker compose -f docker-compose.postgres.yml up -d`.
|
||||||
|
2. Export the granular database environment variables (host `127.0.0.1`, port `5433`, database `calminer_local`, user/password `calminer`/`secret`).
|
||||||
|
3. Run the setup script twice: first with `--dry-run` to preview actions, then without it to apply changes.
|
||||||
|
4. When finished, stop and optionally remove the container/volume using `docker compose -f docker-compose.postgres.yml down`.
|
||||||
|
|
||||||
|
The service exposes Postgres 16 on `localhost:5433` with database `calminer_local` and role `calminer`/`secret`. When the container is running, set the granular environment variables before invoking the setup script:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
$env:DATABASE_DRIVER = 'postgresql'
|
||||||
|
$env:DATABASE_HOST = '127.0.0.1'
|
||||||
|
$env:DATABASE_PORT = '5433'
|
||||||
|
$env:DATABASE_USER = 'calminer'
|
||||||
|
$env:DATABASE_PASSWORD = 'secret'
|
||||||
|
$env:DATABASE_NAME = 'calminer_local'
|
||||||
|
$env:DATABASE_SCHEMA = 'public'
|
||||||
|
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v
|
||||||
|
```
|
||||||
|
|
||||||
|
When testing is complete, shut down the container (and optional persistent volume) with:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
docker compose -f docker-compose.postgres.yml down
|
||||||
|
docker volume rm calminer_postgres_local_postgres_data # optional cleanup
|
||||||
|
```
|
||||||
|
|
||||||
|
Document successful runs (or issues encountered) in `.github/instructions/DONE.TODO.md` for future reference.
|
||||||
|
|
||||||
|
### Seeding reference data
|
||||||
|
|
||||||
|
`scripts/seed_data.py` provides targeted control over the baseline datasets when the full setup script is not required:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
python scripts/seed_data.py --currencies --units --dry-run
|
||||||
|
python scripts/seed_data.py --currencies --units
|
||||||
|
```
|
||||||
|
|
||||||
|
The seeder upserts the canonical currency catalog (`USD`, `EUR`, `CLP`, `RMB`, `GBP`, `CAD`, `AUD`) using ASCII-safe symbols (`USD$`, `EUR`, etc.) and the measurement units referenced by the UI (`tonnes`, `kilograms`, `pounds`, `liters`, `cubic_meters`, `kilowatt_hours`). The setup script invokes the same seeder when `--seed-data` is provided and verifies the expected rows afterward, warning if any are missing or inactive.
|
||||||
|
|
||||||
|
### Rollback guidance
|
||||||
|
|
||||||
|
`scripts/setup_database.py` now tracks compensating actions when it creates the database or application role. If a later step fails, the script replays those rollback actions (dropping the newly created database or role and revoking grants) before exiting. Dry runs never register rollback steps and remain read-only.
|
||||||
|
|
||||||
|
If the script reports that some rollback steps could not complete—for example because a connection cannot be established—rerun the script with `--dry-run` to confirm the desired end state and then apply the outstanding cleanup manually:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
python scripts/setup_database.py --ensure-database --ensure-role --dry-run -v
|
||||||
|
|
||||||
|
# Manual cleanup examples when automation cannot connect
|
||||||
|
psql -d postgres -c "DROP DATABASE IF EXISTS calminer"
|
||||||
|
psql -d postgres -c "DROP ROLE IF EXISTS calminer"
|
||||||
|
```
|
||||||
|
|
||||||
|
After a failure and rollback, rerun the full setup once the environment issues are resolved.
|
||||||
|
|
||||||
|
### CI pipeline environment
|
||||||
|
|
||||||
|
The `.gitea/workflows/test.yml` job spins up a temporary PostgreSQL 16 container and runs the setup script twice: once with `--dry-run` to validate the plan and again without it to apply migrations and seeds. No external secrets are required; the workflow sets the following environment variables for both invocations and for pytest:
|
||||||
|
|
||||||
|
| Variable | Value | Purpose |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `DATABASE_DRIVER` | `postgresql` | Signals the driver to the setup script |
|
||||||
|
| `DATABASE_HOST` | `postgres` | Hostname of the Postgres job service container |
|
||||||
|
| `DATABASE_PORT` | `5432` | Default service port |
|
||||||
|
| `DATABASE_NAME` | `calminer_ci` | Target database created by the workflow |
|
||||||
|
| `DATABASE_USER` | `calminer` | Application role used during tests |
|
||||||
|
| `DATABASE_PASSWORD` | `secret` | Password for both admin and app role |
|
||||||
|
| `DATABASE_SCHEMA` | `public` | Default schema for the tests |
|
||||||
|
| `DATABASE_SUPERUSER` | `calminer` | Setup script uses the same role for admin actions |
|
||||||
|
| `DATABASE_SUPERUSER_PASSWORD` | `secret` | Matches the Postgres service password |
|
||||||
|
| `DATABASE_SUPERUSER_DB` | `calminer_ci` | Database to connect to for admin operations |
|
||||||
|
|
||||||
|
The workflow also updates `DATABASE_URL` for pytest to point at the CI Postgres instance. Existing tests continue to work unchanged, since SQLAlchemy reads the URL exactly as it does locally.
|
||||||
|
|
||||||
|
Because the workflow provisions everything inline, no repository or organization secrets need to be configured for basic CI runs. If you later move the setup step to staging or production pipelines, replace these inline values with secrets managed by the CI platform. When running on self-hosted runners behind an HTTP proxy or apt cache, ensure Playwright dependencies and OS packages inherit the same proxy settings that the workflow configures prior to installing browsers.
|
||||||
|
|
||||||
|
### Staging environment workflow

Use the staging checklist in `docs/staging_environment_setup.md` when running the setup script against the shared environment. A sample variable file (`config/setup_staging.env`) records the expected inputs (host, port, admin/application roles); copy it outside the repository or load the values securely via your shell before executing the workflow.

Recommended execution order:

1. Dry run with `--dry-run -v` to confirm connectivity and review planned operations. Capture the output to `reports/setup_staging_dry_run.log` (or similar) for auditing.
2. Execute the live run with the same flags minus `--dry-run` to provision the database, role grants, migrations, and seed data. Save the log as `reports/setup_staging_apply.log`.
3. Repeat the dry run to verify idempotency and record the result (for example `reports/setup_staging_post_apply.log`).

Record any issues in `.github/instructions/TODO.md` or `.github/instructions/DONE.TODO.md` as appropriate so the team can track follow-up actions.
## Database Objects
78  docs/seed_data_plan.md  Normal file

@@ -0,0 +1,78 @@
# Baseline Seed Data Plan

This document captures the datasets that should be present in a fresh CalMiner installation and the structure required to manage them through `scripts/seed_data.py`.

## Currency Catalog

The `currency` table already exists and is seeded today via `scripts/seed_data.py`. The goal is to keep the canonical list in one place and ensure the default currency (USD) is always active.

| Code | Name | Symbol | Notes |
| ---- | ----------------- | ------ | ------------------------------------------ |
| USD | US Dollar | $ | Default currency (`DEFAULT_CURRENCY_CODE`) |
| EUR | Euro | € | |
| CLP | Chilean Peso | $ | |
| RMB | Chinese Yuan | ¥ | |
| GBP | British Pound | £ | |
| CAD | Canadian Dollar | $ | |
| AUD | Australian Dollar | $ | |

Seeding behaviour:

- Upsert by ISO code; keep existing name/symbol when updated manually.
- Ensure `is_active` remains true for USD and defaults to true for new rows.
- Defer to runtime validation in `routes.currencies` for enforcing default behaviour.

## Measurement Units

UI routes (`routes/ui.py`) currently rely on the in-memory `MEASUREMENT_UNITS` list to populate dropdowns for consumption and production forms. To make this configurable and available to the API, introduce a dedicated `measurement_unit` table and seed it.

Proposed schema:

| Column | Type | Notes |
| ---------- | --------------- | -------------------------------------- |
| id | SERIAL / BIGINT | Primary key. |
| code | TEXT | Stable slug (e.g. `tonnes`). Unique. |
| name | TEXT | Display label. |
| symbol | TEXT | Short symbol (nullable). |
| unit_type | TEXT | Category (`mass`, `volume`, `energy`). |
| is_active | BOOLEAN | Default `true` for soft disabling. |
| created_at | TIMESTAMP | Optional `NOW()` default. |
| updated_at | TIMESTAMP | Optional `NOW()` trigger/default. |

Initial seed set (mirrors existing UI list plus type categorisation):

| Code | Name | Symbol | Unit Type |
| -------------- | -------------- | ------ | --------- |
| tonnes | Tonnes | t | mass |
| kilograms | Kilograms | kg | mass |
| pounds | Pounds | lb | mass |
| liters | Liters | L | volume |
| cubic_meters | Cubic Meters | m3 | volume |
| kilowatt_hours | Kilowatt Hours | kWh | energy |

Seeding behaviour:

- Upsert rows by `code`.
- Preserve `unit_type` and `symbol` unless explicitly changed via administration tooling.
- Continue surfacing unit options to the UI by querying this table instead of the static constant (see the sketch below).
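To illustrate the last point, a minimal sketch of the lookup once the table exists, assuming a SQLAlchemy 2.0-style `MeasurementUnit` model (the model does not exist yet in this branch; the real one would extend the shared `Base` from `config.database`):

```python
from typing import Any, Dict, List, Optional

from sqlalchemy import Boolean, Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class MeasurementUnit(Base):
    """Illustrative mapping of the proposed measurement_unit table."""

    __tablename__ = "measurement_unit"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    code: Mapped[str] = mapped_column(String(64), unique=True)
    name: Mapped[str] = mapped_column(String(128))
    symbol: Mapped[Optional[str]] = mapped_column(String(16), nullable=True)
    unit_type: Mapped[str] = mapped_column(String(32))
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)


def load_measurement_units(db: Session) -> List[Dict[str, Any]]:
    """Return active units ordered by code, shaped for the UI dropdowns."""
    query = (
        db.query(MeasurementUnit)
        .filter(MeasurementUnit.is_active.is_(True))
        .order_by(MeasurementUnit.code)
    )
    return [
        {"code": unit.code, "name": unit.name, "symbol": unit.symbol}
        for unit in query.all()
    ]
```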

## Default Settings

The application expects certain defaults to exist:

- **Default currency**: enforced by `routes.currencies._ensure_default_currency`; ensure seeds keep USD active.
- **Fallback measurement unit**: the UI currently auto-selects the first option in the list. Once units move to the database, expose an application setting to choose a fallback (future work tracked under "Application Settings management").

## Seeding Structure Updates

To support the datasets above:

1. Extend `scripts/seed_data.py` with a `SeedDataset` registry so each dataset (currencies, units, future defaults) can declare its loader/upsert function and optional dependencies (a sketch follows this plan).
2. Add a `--dataset` CLI selector for targeted seeding while keeping `--all` as the default for `setup_database.py` integrations.
3. Update `scripts/setup_database.py` to:
   - Run the migration ensuring the `measurement_unit` table exists.
   - Execute the unit seeder after currencies when `--seed-data` is supplied.
   - Verify post-seed counts, logging which datasets were inserted/updated.
4. Adjust UI routes to load measurement units from the database and remove the hard-coded list once the table is available.

This plan aligns with the TODO item for seeding initial data and lays the groundwork for consolidating migrations around a single baseline file that introduces both the schema and seed data in an idempotent manner.
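As a concrete starting point for step 1, a minimal sketch of a `SeedDataset` registry; the names and call signatures are illustrative and may differ from the eventual implementation in `scripts/seed_data.py`:

```python
from dataclasses import dataclass, field
from typing import Callable, Dict, Sequence


@dataclass(frozen=True)
class SeedDataset:
    """Describes one seedable dataset and how to load it."""

    name: str
    seeder: Callable[..., None]  # e.g. _seed_currencies(cursor, dry_run=...)
    depends_on: Sequence[str] = field(default_factory=tuple)


# Registry keyed by dataset name; execution order respects depends_on.
SEED_REGISTRY: Dict[str, SeedDataset] = {}


def register(dataset: SeedDataset) -> None:
    SEED_REGISTRY[dataset.name] = dataset


def run(names: Sequence[str], cursor, *, dry_run: bool = False) -> None:
    """Run the requested datasets, resolving simple dependencies first."""
    seen: set[str] = set()

    def _run_one(name: str) -> None:
        if name in seen:
            return
        dataset = SEED_REGISTRY[name]
        for dep in dataset.depends_on:
            _run_one(dep)
        dataset.seeder(cursor, dry_run=dry_run)
        seen.add(name)

    for name in names:
        _run_one(name)
```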
101  docs/staging_environment_setup.md  Normal file

@@ -0,0 +1,101 @@
# Staging Environment Setup

This guide outlines how to provision and validate the CalMiner staging database using `scripts/setup_database.py`. It complements the local and CI-focused instructions in `docs/quickstart.md`.

## Prerequisites

- Network access to the staging infrastructure (VPN or bastion, as required by ops).
- Provisioned PostgreSQL instance with superuser or delegated admin credentials for maintenance.
- Application credentials (role + password) dedicated to CalMiner staging.
- The application repository checked out with Python dependencies installed (`pip install -r requirements.txt`).
- Optional but recommended: a writable directory (for example `reports/`) to capture setup logs.

> Replace the placeholder values in the examples below with the actual host, port, and credential details supplied by ops.

## Environment Configuration

Populate the following environment variables before invoking the setup script. Store them in a secure location such as `config/setup_staging.env` (excluded from source control) and load them with `dotenv` or your shell profile.

| Variable | Description |
| --- | --- |
| `DATABASE_HOST` | Staging PostgreSQL hostname or IP (for example `staging-db.internal`). |
| `DATABASE_PORT` | Port exposed by the staging PostgreSQL service (default `5432`). |
| `DATABASE_NAME` | CalMiner staging database name (for example `calminer_staging`). |
| `DATABASE_USER` | Application role used by the FastAPI app (for example `calminer_app`). |
| `DATABASE_PASSWORD` | Password for the application role. |
| `DATABASE_SCHEMA` | Optional non-public schema; omit or set to `public` otherwise. |
| `DATABASE_SUPERUSER` | Administrative role with rights to create roles/databases (for example `calminer_admin`). |
| `DATABASE_SUPERUSER_PASSWORD` | Password for the administrative role. |
| `DATABASE_SUPERUSER_DB` | Database to connect to for admin tasks (default `postgres`). |
| `DATABASE_ADMIN_URL` | Optional DSN that overrides the granular admin settings above. |

You may also set `DATABASE_URL` for application runtime convenience, but the setup script only requires the values listed in the table.

### Loading Variables (PowerShell example)

```powershell
$env:DATABASE_HOST = "staging-db.internal"
$env:DATABASE_PORT = "5432"
$env:DATABASE_NAME = "calminer_staging"
$env:DATABASE_USER = "calminer_app"
$env:DATABASE_PASSWORD = "<app-password>"
$env:DATABASE_SUPERUSER = "calminer_admin"
$env:DATABASE_SUPERUSER_PASSWORD = "<admin-password>"
$env:DATABASE_SUPERUSER_DB = "postgres"
```

For bash shells, export the same variables using `export VARIABLE=value` or load them through `dotenv`, as sketched below.
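If you prefer to drive the setup from Python rather than the shell, a small wrapper can load the variable file first. A minimal sketch, assuming `python-dotenv` is installed and that `config/setup_staging.env` holds the values above (both are assumptions, not requirements of the setup script):

```python
import os
import subprocess

from dotenv import load_dotenv  # assumes python-dotenv is available

# Load the staging variables, then hand the populated environment to the setup script.
load_dotenv("config/setup_staging.env")

subprocess.run(
    [
        "python",
        "scripts/setup_database.py",
        "--ensure-database",
        "--ensure-role",
        "--dry-run",
        "-v",
    ],
    check=True,
    env=os.environ.copy(),
)
```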

## Setup Workflow

Run the setup script in three phases to validate idempotency and capture diagnostics:

1. **Dry run (diagnostic):**

   ```powershell
   python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v `
     2>&1 | Tee-Object -FilePath reports/setup_staging_dry_run.log
   ```

   Confirm that the script reports planned actions without failures. If the application role is missing, a dry run will log skip messages until a live run creates the role.

2. **Apply changes:**

   ```powershell
   python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data -v `
     2>&1 | Tee-Object -FilePath reports/setup_staging_apply.log
   ```

   Verify the log for successful database creation, role grants, migration execution, and seed verification.

3. **Post-apply dry run:**

   ```powershell
   python scripts/setup_database.py --ensure-database --ensure-role --ensure-schema --initialize-schema --run-migrations --seed-data --dry-run -v `
     2>&1 | Tee-Object -FilePath reports/setup_staging_post_apply.log
   ```

   This run should confirm that all schema objects, migrations, and seed data are already in place.

## Validation Checklist

- [ ] Confirm the staging application can connect using the application DSN (for example, run `pytest tests/e2e/test_smoke.py` against staging or trigger a smoke test workflow).
- [ ] Inspect `schema_migrations` to ensure the baseline migration (`000_base.sql`) is recorded.
- [ ] Spot-check seeded reference data (`currency`, `measurement_unit`) for correctness.
- [ ] Capture and archive the three setup logs in a shared location for audit purposes.
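The second and third checks can also be scripted. A minimal sketch using `psycopg2` with the application credentials from this guide (read-only queries; the exact columns of `schema_migrations` depend on the setup script, so the sketch just prints whole rows):

```python
import os

import psycopg2

# Connect with the same application variables used elsewhere in this guide.
dsn = (
    f"dbname={os.environ['DATABASE_NAME']} "
    f"user={os.environ['DATABASE_USER']} "
    f"password={os.environ['DATABASE_PASSWORD']} "
    f"host={os.environ['DATABASE_HOST']} "
    f"port={os.environ.get('DATABASE_PORT', '5432')}"
)

with psycopg2.connect(dsn) as conn:
    with conn.cursor() as cur:
        # Recorded migrations (expect 000_base.sql among them).
        cur.execute("SELECT * FROM schema_migrations")
        for row in cur.fetchall():
            print("migration:", row)

        # Seeded reference data counts.
        for table in ("currency", "measurement_unit"):
            cur.execute(f"SELECT COUNT(*) FROM {table}")  # fixed, trusted names
            print(table, "rows:", cur.fetchone()[0])
```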

## Troubleshooting

- If the dry run reports skipped actions because the application role does not exist, proceed with the live run; subsequent dry runs will validate as expected.
- Connection errors usually stem from network restrictions or incorrect credentials. Validate reachability with `psql` or `pg_isready` using the same host/port and credentials.
- For permission issues during migrations or seeding, confirm the admin role has rights on the target database and that the application role inherits the expected privileges.

## Rollback Guidance

- Database creation and role grants register rollback actions when not running in dry-run mode. If a later step fails, rerun the script without `--dry-run`; it will automatically revoke grants or drop newly created resources as part of the rollback routine.
- For staged environments where manual intervention is required, coordinate with ops before dropping databases or roles.

## Next Steps

- Keep this document updated as staging infrastructure evolves (for example, when migrating to managed services or rotating credentials).
- Once staging validation is complete, summarize the outcome in `.github/instructions/DONE.TODO.md` and cross-link the relevant log files.
4  main.py

@@ -13,8 +13,10 @@ from routes.consumption import router as consumption_router
from routes.production import router as production_router
from routes.equipment import router as equipment_router
from routes.reporting import router as reporting_router
from routes.currencies import router as currencies_router
from routes.simulations import router as simulations_router
from routes.maintenance import router as maintenance_router
from routes.settings import router as settings_router

# Initialize database schema
Base.metadata.create_all(bind=engine)

@@ -41,4 +43,6 @@ app.include_router(production_router)
app.include_router(equipment_router)
app.include_router(maintenance_router)
app.include_router(reporting_router)
app.include_router(currencies_router)
app.include_router(settings_router)
app.include_router(ui_router)

@@ -1,5 +1,6 @@
"""
models package initializer. Import the currency model so it's registered
models package initializer. Import key models so they're registered
with the shared Base.metadata when the package is imported by tests.
"""
from . import application_setting  # noqa: F401
from . import currency  # noqa: F401
29  models/application_setting.py  Normal file

@@ -0,0 +1,29 @@
from datetime import datetime
from typing import Optional

from sqlalchemy import Boolean, DateTime, String, Text
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.sql import func

from config.database import Base


class ApplicationSetting(Base):
    __tablename__ = "application_setting"

    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    key: Mapped[str] = mapped_column(String(128), unique=True, nullable=False)
    value: Mapped[str] = mapped_column(Text, nullable=False)
    value_type: Mapped[str] = mapped_column(String(32), nullable=False, default="string")
    category: Mapped[str] = mapped_column(String(32), nullable=False, default="general")
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    is_editable: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False
    )

    def __repr__(self) -> str:
        return f"<ApplicationSetting key={self.key} category={self.category}>"
@@ -1,7 +1,9 @@
|
|||||||
from typing import List, Dict, Any
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
from sqlalchemy.exc import IntegrityError
|
||||||
|
|
||||||
from models.currency import Currency
|
from models.currency import Currency
|
||||||
from routes.dependencies import get_db
|
from routes.dependencies import get_db
|
||||||
@@ -9,11 +11,181 @@ from routes.dependencies import get_db
|
|||||||
router = APIRouter(prefix="/api/currencies", tags=["Currencies"])
|
router = APIRouter(prefix="/api/currencies", tags=["Currencies"])
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[Dict[str, Any]])
|
DEFAULT_CURRENCY_CODE = "USD"
|
||||||
def list_currencies(db: Session = Depends(get_db)):
|
DEFAULT_CURRENCY_NAME = "US Dollar"
|
||||||
results = []
|
DEFAULT_CURRENCY_SYMBOL = "$"
|
||||||
for c in db.query(Currency).filter_by(is_active=True).order_by(Currency.code).all():
|
|
||||||
results.append({"id": c.code, "name": f"{c.name} ({c.code})", "symbol": c.symbol})
|
|
||||||
if not results:
|
class CurrencyBase(BaseModel):
|
||||||
results.append({"id": "USD", "name": "US Dollar (USD)", "symbol": "$"})
|
name: str = Field(..., min_length=1, max_length=128)
|
||||||
return results
|
symbol: Optional[str] = Field(default=None, max_length=8)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_symbol(value: Optional[str]) -> Optional[str]:
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
value = value.strip()
|
||||||
|
return value or None
|
||||||
|
|
||||||
|
@field_validator("name")
|
||||||
|
@classmethod
|
||||||
|
def _strip_name(cls, value: str) -> str:
|
||||||
|
return value.strip()
|
||||||
|
|
||||||
|
@field_validator("symbol")
|
||||||
|
@classmethod
|
||||||
|
def _strip_symbol(cls, value: Optional[str]) -> Optional[str]:
|
||||||
|
return cls._normalize_symbol(value)
|
||||||
|
|
||||||
|
|
||||||
|
class CurrencyCreate(CurrencyBase):
|
||||||
|
code: str = Field(..., min_length=3, max_length=3)
|
||||||
|
is_active: bool = True
|
||||||
|
|
||||||
|
@field_validator("code")
|
||||||
|
@classmethod
|
||||||
|
def _normalize_code(cls, value: str) -> str:
|
||||||
|
return value.strip().upper()
|
||||||
|
|
||||||
|
|
||||||
|
class CurrencyUpdate(CurrencyBase):
|
||||||
|
is_active: Optional[bool] = None
|
||||||
|
|
||||||
|
|
||||||
|
class CurrencyActivation(BaseModel):
|
||||||
|
is_active: bool
|
||||||
|
|
||||||
|
|
||||||
|
class CurrencyRead(CurrencyBase):
|
||||||
|
id: int
|
||||||
|
code: str
|
||||||
|
is_active: bool
|
||||||
|
|
||||||
|
model_config = ConfigDict(from_attributes=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_default_currency(db: Session) -> Currency:
|
||||||
|
existing = (
|
||||||
|
db.query(Currency)
|
||||||
|
.filter(Currency.code == DEFAULT_CURRENCY_CODE)
|
||||||
|
.one_or_none()
|
||||||
|
)
|
||||||
|
if existing:
|
||||||
|
return existing
|
||||||
|
|
||||||
|
default_currency = Currency(
|
||||||
|
code=DEFAULT_CURRENCY_CODE,
|
||||||
|
name=DEFAULT_CURRENCY_NAME,
|
||||||
|
symbol=DEFAULT_CURRENCY_SYMBOL,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
db.add(default_currency)
|
||||||
|
try:
|
||||||
|
db.commit()
|
||||||
|
except IntegrityError:
|
||||||
|
db.rollback()
|
||||||
|
existing = (
|
||||||
|
db.query(Currency)
|
||||||
|
.filter(Currency.code == DEFAULT_CURRENCY_CODE)
|
||||||
|
.one()
|
||||||
|
)
|
||||||
|
return existing
|
||||||
|
db.refresh(default_currency)
|
||||||
|
return default_currency
|
||||||
|
|
||||||
|
|
||||||
|
def _get_currency_or_404(db: Session, code: str) -> Currency:
|
||||||
|
normalized = code.strip().upper()
|
||||||
|
currency = (
|
||||||
|
db.query(Currency)
|
||||||
|
.filter(Currency.code == normalized)
|
||||||
|
.one_or_none()
|
||||||
|
)
|
||||||
|
if currency is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND, detail="Currency not found")
|
||||||
|
return currency
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/", response_model=List[CurrencyRead])
|
||||||
|
def list_currencies(
|
||||||
|
include_inactive: bool = Query(
|
||||||
|
False, description="Include inactive currencies"),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
|
):
|
||||||
|
_ensure_default_currency(db)
|
||||||
|
query = db.query(Currency)
|
||||||
|
if not include_inactive:
|
||||||
|
query = query.filter(Currency.is_active.is_(True))
|
||||||
|
currencies = query.order_by(Currency.code).all()
|
||||||
|
return currencies
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/", response_model=CurrencyRead, status_code=status.HTTP_201_CREATED)
|
||||||
|
def create_currency(payload: CurrencyCreate, db: Session = Depends(get_db)):
|
||||||
|
code = payload.code
|
||||||
|
existing = (
|
||||||
|
db.query(Currency)
|
||||||
|
.filter(Currency.code == code)
|
||||||
|
.one_or_none()
|
||||||
|
)
|
||||||
|
if existing is not None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_409_CONFLICT,
|
||||||
|
detail=f"Currency '{code}' already exists",
|
||||||
|
)
|
||||||
|
|
||||||
|
currency = Currency(
|
||||||
|
code=code,
|
||||||
|
name=payload.name,
|
||||||
|
symbol=CurrencyBase._normalize_symbol(payload.symbol),
|
||||||
|
is_active=payload.is_active,
|
||||||
|
)
|
||||||
|
db.add(currency)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(currency)
|
||||||
|
return currency
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{code}", response_model=CurrencyRead)
|
||||||
|
def update_currency(code: str, payload: CurrencyUpdate, db: Session = Depends(get_db)):
|
||||||
|
currency = _get_currency_or_404(db, code)
|
||||||
|
|
||||||
|
if payload.name is not None:
|
||||||
|
setattr(currency, "name", payload.name)
|
||||||
|
if payload.symbol is not None:
|
||||||
|
setattr(
|
||||||
|
currency,
|
||||||
|
"symbol",
|
||||||
|
CurrencyBase._normalize_symbol(payload.symbol),
|
||||||
|
)
|
||||||
|
if payload.is_active is not None:
|
||||||
|
code_value = getattr(currency, "code")
|
||||||
|
if code_value == DEFAULT_CURRENCY_CODE and payload.is_active is False:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
detail="The default currency cannot be deactivated.",
|
||||||
|
)
|
||||||
|
setattr(currency, "is_active", payload.is_active)
|
||||||
|
|
||||||
|
db.add(currency)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(currency)
|
||||||
|
return currency
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{code}/activation", response_model=CurrencyRead)
|
||||||
|
def toggle_currency_activation(code: str, body: CurrencyActivation, db: Session = Depends(get_db)):
|
||||||
|
currency = _get_currency_or_404(db, code)
|
||||||
|
code_value = getattr(currency, "code")
|
||||||
|
if code_value == DEFAULT_CURRENCY_CODE and body.is_active is False:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
detail="The default currency cannot be deactivated.",
|
||||||
|
)
|
||||||
|
|
||||||
|
setattr(currency, "is_active", body.is_active)
|
||||||
|
db.add(currency)
|
||||||
|
db.commit()
|
||||||
|
db.refresh(currency)
|
||||||
|
return currency
|
||||||
|
|||||||
85
routes/settings.py
Normal file
85
routes/settings.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from pydantic import BaseModel, Field, model_validator
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from routes.dependencies import get_db
|
||||||
|
from services.settings import (
|
||||||
|
CSS_COLOR_DEFAULTS,
|
||||||
|
get_css_color_settings,
|
||||||
|
list_css_env_override_rows,
|
||||||
|
read_css_color_env_overrides,
|
||||||
|
update_css_color_settings,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/settings", tags=["Settings"])
|
||||||
|
|
||||||
|
|
||||||
|
class CSSSettingsPayload(BaseModel):
|
||||||
|
variables: Dict[str, str] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def _validate_allowed_keys(self) -> "CSSSettingsPayload":
|
||||||
|
invalid = set(self.variables.keys()) - set(CSS_COLOR_DEFAULTS.keys())
|
||||||
|
if invalid:
|
||||||
|
invalid_keys = ", ".join(sorted(invalid))
|
||||||
|
raise ValueError(
|
||||||
|
f"Unsupported CSS variables: {invalid_keys}."
|
||||||
|
" Accepted keys align with the default theme variables."
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class EnvOverride(BaseModel):
|
||||||
|
css_key: str
|
||||||
|
env_var: str
|
||||||
|
value: str
|
||||||
|
|
||||||
|
|
||||||
|
class CSSSettingsResponse(BaseModel):
|
||||||
|
variables: Dict[str, str]
|
||||||
|
env_overrides: Dict[str, str] = Field(default_factory=dict)
|
||||||
|
env_sources: List[EnvOverride] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/css", response_model=CSSSettingsResponse)
|
||||||
|
def read_css_settings(db: Session = Depends(get_db)) -> CSSSettingsResponse:
|
||||||
|
try:
|
||||||
|
values = get_css_color_settings(db)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_sources = [
|
||||||
|
EnvOverride(**row)
|
||||||
|
for row in list_css_env_override_rows()
|
||||||
|
]
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
return CSSSettingsResponse(
|
||||||
|
variables=values,
|
||||||
|
env_overrides=env_overrides,
|
||||||
|
env_sources=env_sources,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/css", response_model=CSSSettingsResponse, status_code=status.HTTP_200_OK)
|
||||||
|
def update_css_settings(payload: CSSSettingsPayload, db: Session = Depends(get_db)) -> CSSSettingsResponse:
|
||||||
|
try:
|
||||||
|
values = update_css_color_settings(db, payload.variables)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_sources = [
|
||||||
|
EnvOverride(**row)
|
||||||
|
for row in list_css_env_override_rows()
|
||||||
|
]
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||||
|
detail=str(exc),
|
||||||
|
) from exc
|
||||||
|
return CSSSettingsResponse(
|
||||||
|
variables=values,
|
||||||
|
env_overrides=env_overrides,
|
||||||
|
env_sources=env_sources,
|
||||||
|
)
|
||||||
67
routes/ui.py
67
routes/ui.py
@@ -19,6 +19,13 @@ from models.simulation_result import SimulationResult
|
|||||||
from routes.dependencies import get_db
|
from routes.dependencies import get_db
|
||||||
from services.reporting import generate_report
|
from services.reporting import generate_report
|
||||||
from models.currency import Currency
|
from models.currency import Currency
|
||||||
|
from routes.currencies import DEFAULT_CURRENCY_CODE, _ensure_default_currency
|
||||||
|
from services.settings import (
|
||||||
|
CSS_COLOR_DEFAULTS,
|
||||||
|
get_css_color_settings,
|
||||||
|
list_css_env_override_rows,
|
||||||
|
read_css_color_env_overrides,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
CURRENCY_CHOICES: list[Dict[str, Any]] = [
|
CURRENCY_CHOICES: list[Dict[str, Any]] = [
|
||||||
@@ -153,6 +160,52 @@ def _load_currencies(db: Session) -> Dict[str, Any]:
|
|||||||
return {"currency_options": items}
|
return {"currency_options": items}
|
||||||
|
|
||||||
|
|
||||||
|
def _load_currency_settings(db: Session) -> Dict[str, Any]:
|
||||||
|
_ensure_default_currency(db)
|
||||||
|
records = db.query(Currency).order_by(Currency.code).all()
|
||||||
|
currencies: list[Dict[str, Any]] = []
|
||||||
|
for record in records:
|
||||||
|
code_value = getattr(record, "code")
|
||||||
|
currencies.append(
|
||||||
|
{
|
||||||
|
"id": int(getattr(record, "id")),
|
||||||
|
"code": code_value,
|
||||||
|
"name": getattr(record, "name"),
|
||||||
|
"symbol": getattr(record, "symbol"),
|
||||||
|
"is_active": bool(getattr(record, "is_active", True)),
|
||||||
|
"is_default": code_value == DEFAULT_CURRENCY_CODE,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
active_count = sum(1 for item in currencies if item["is_active"])
|
||||||
|
inactive_count = len(currencies) - active_count
|
||||||
|
|
||||||
|
return {
|
||||||
|
"currencies": currencies,
|
||||||
|
"currency_stats": {
|
||||||
|
"total": len(currencies),
|
||||||
|
"active": active_count,
|
||||||
|
"inactive": inactive_count,
|
||||||
|
},
|
||||||
|
"default_currency_code": DEFAULT_CURRENCY_CODE,
|
||||||
|
"currency_api_base": "/api/currencies",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _load_css_settings(db: Session) -> Dict[str, Any]:
|
||||||
|
variables = get_css_color_settings(db)
|
||||||
|
env_overrides = read_css_color_env_overrides()
|
||||||
|
env_rows = list_css_env_override_rows()
|
||||||
|
env_meta = {row["css_key"]: row for row in env_rows}
|
||||||
|
return {
|
||||||
|
"css_variables": variables,
|
||||||
|
"css_defaults": CSS_COLOR_DEFAULTS,
|
||||||
|
"css_env_overrides": env_overrides,
|
||||||
|
"css_env_override_rows": env_rows,
|
||||||
|
"css_env_override_meta": env_meta,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def _load_consumption(db: Session) -> Dict[str, Any]:
|
def _load_consumption(db: Session) -> Dict[str, Any]:
|
||||||
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
grouped: defaultdict[int, list[Dict[str, Any]]] = defaultdict(list)
|
||||||
for record in (
|
for record in (
|
||||||
@@ -637,3 +690,17 @@ async def simulations_view(request: Request, db: Session = Depends(get_db)):
|
|||||||
async def reporting_view(request: Request, db: Session = Depends(get_db)):
|
async def reporting_view(request: Request, db: Session = Depends(get_db)):
|
||||||
"""Render the reporting view with scenario KPI summaries."""
|
"""Render the reporting view with scenario KPI summaries."""
|
||||||
return _render(request, "reporting.html", _load_reporting(db))
|
return _render(request, "reporting.html", _load_reporting(db))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/ui/settings", response_class=HTMLResponse)
|
||||||
|
async def settings_view(request: Request, db: Session = Depends(get_db)):
|
||||||
|
"""Render the settings landing page."""
|
||||||
|
context = _load_css_settings(db)
|
||||||
|
return _render(request, "settings.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/ui/currencies", response_class=HTMLResponse)
|
||||||
|
async def currencies_view(request: Request, db: Session = Depends(get_db)):
|
||||||
|
"""Render the currency administration page with full currency context."""
|
||||||
|
context = _load_currency_settings(db)
|
||||||
|
return _render(request, "currencies.html", context)
|
||||||
|
|||||||
161
scripts/migrations/000_base.sql
Normal file
161
scripts/migrations/000_base.sql
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
-- Baseline migration for CalMiner database schema
|
||||||
|
-- Date: 2025-10-25
|
||||||
|
-- Purpose: Consolidate foundational tables and reference data
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- Currency reference table
|
||||||
|
CREATE TABLE IF NOT EXISTS currency (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
code VARCHAR(3) NOT NULL UNIQUE,
|
||||||
|
name VARCHAR(128) NOT NULL,
|
||||||
|
symbol VARCHAR(8),
|
||||||
|
is_active BOOLEAN NOT NULL DEFAULT TRUE
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO currency (code, name, symbol, is_active)
|
||||||
|
VALUES
|
||||||
|
('USD', 'United States Dollar', 'USD$', TRUE),
|
||||||
|
('EUR', 'Euro', 'EUR', TRUE),
|
||||||
|
('CLP', 'Chilean Peso', 'CLP$', TRUE),
|
||||||
|
('RMB', 'Chinese Yuan', 'RMB', TRUE),
|
||||||
|
('GBP', 'British Pound', 'GBP', TRUE),
|
||||||
|
('CAD', 'Canadian Dollar', 'CAD$', TRUE),
|
||||||
|
('AUD', 'Australian Dollar', 'AUD$', TRUE)
|
||||||
|
ON CONFLICT (code) DO UPDATE
|
||||||
|
SET name = EXCLUDED.name,
|
||||||
|
symbol = EXCLUDED.symbol,
|
||||||
|
is_active = EXCLUDED.is_active;
|
||||||
|
|
||||||
|
-- Application-level settings table
|
||||||
|
CREATE TABLE IF NOT EXISTS application_setting (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
key VARCHAR(128) NOT NULL UNIQUE,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
value_type VARCHAR(32) NOT NULL DEFAULT 'string',
|
||||||
|
category VARCHAR(32) NOT NULL DEFAULT 'general',
|
||||||
|
description TEXT,
|
||||||
|
is_editable BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS ux_application_setting_key
|
||||||
|
ON application_setting (key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_application_setting_category
|
||||||
|
ON application_setting (category);
|
||||||
|
|
||||||
|
-- Measurement unit reference table
|
||||||
|
CREATE TABLE IF NOT EXISTS measurement_unit (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
code VARCHAR(64) NOT NULL UNIQUE,
|
||||||
|
name VARCHAR(128) NOT NULL,
|
||||||
|
symbol VARCHAR(16),
|
||||||
|
unit_type VARCHAR(32) NOT NULL,
|
||||||
|
is_active BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO measurement_unit (code, name, symbol, unit_type, is_active)
|
||||||
|
VALUES
|
||||||
|
('tonnes', 'Tonnes', 't', 'mass', TRUE),
|
||||||
|
('kilograms', 'Kilograms', 'kg', 'mass', TRUE),
|
||||||
|
('pounds', 'Pounds', 'lb', 'mass', TRUE),
|
||||||
|
('liters', 'Liters', 'L', 'volume', TRUE),
|
||||||
|
('cubic_meters', 'Cubic Meters', 'm3', 'volume', TRUE),
|
||||||
|
('kilowatt_hours', 'Kilowatt Hours', 'kWh', 'energy', TRUE)
|
||||||
|
ON CONFLICT (code) DO UPDATE
|
||||||
|
SET name = EXCLUDED.name,
|
||||||
|
symbol = EXCLUDED.symbol,
|
||||||
|
unit_type = EXCLUDED.unit_type,
|
||||||
|
is_active = EXCLUDED.is_active;
|
||||||
|
|
||||||
|
-- Consumption and production measurement metadata
|
||||||
|
ALTER TABLE consumption
|
||||||
|
ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
|
||||||
|
ALTER TABLE consumption
|
||||||
|
ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
|
||||||
|
|
||||||
|
ALTER TABLE production_output
|
||||||
|
ADD COLUMN IF NOT EXISTS unit_name VARCHAR(64);
|
||||||
|
ALTER TABLE production_output
|
||||||
|
ADD COLUMN IF NOT EXISTS unit_symbol VARCHAR(16);
|
||||||
|
|
||||||
|
-- Currency integration for CAPEX and OPEX
|
||||||
|
ALTER TABLE capex
|
||||||
|
ADD COLUMN IF NOT EXISTS currency_id INTEGER;
|
||||||
|
ALTER TABLE opex
|
||||||
|
ADD COLUMN IF NOT EXISTS currency_id INTEGER;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
usd_id INTEGER;
|
||||||
|
BEGIN
|
||||||
|
-- Ensure currency_id columns align with legacy currency_code values when present
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'capex' AND column_name = 'currency_code'
|
||||||
|
) THEN
|
||||||
|
UPDATE capex AS c
|
||||||
|
SET currency_id = cur.id
|
||||||
|
FROM currency AS cur
|
||||||
|
WHERE c.currency_code = cur.code
|
||||||
|
AND (c.currency_id IS DISTINCT FROM cur.id);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_name = 'opex' AND column_name = 'currency_code'
|
||||||
|
) THEN
|
||||||
|
UPDATE opex AS o
|
||||||
|
SET currency_id = cur.id
|
||||||
|
FROM currency AS cur
|
||||||
|
WHERE o.currency_code = cur.code
|
||||||
|
AND (o.currency_id IS DISTINCT FROM cur.id);
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT id INTO usd_id FROM currency WHERE code = 'USD';
|
||||||
|
IF usd_id IS NOT NULL THEN
|
||||||
|
UPDATE capex SET currency_id = usd_id WHERE currency_id IS NULL;
|
||||||
|
UPDATE opex SET currency_id = usd_id WHERE currency_id IS NULL;
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
ALTER TABLE capex
|
||||||
|
ALTER COLUMN currency_id SET NOT NULL;
|
||||||
|
ALTER TABLE opex
|
||||||
|
ALTER COLUMN currency_id SET NOT NULL;
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = current_schema()
|
||||||
|
AND table_name = 'capex'
|
||||||
|
AND constraint_name = 'fk_capex_currency'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE capex
|
||||||
|
ADD CONSTRAINT fk_capex_currency FOREIGN KEY (currency_id)
|
||||||
|
REFERENCES currency (id) ON DELETE RESTRICT;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_schema = current_schema()
|
||||||
|
AND table_name = 'opex'
|
||||||
|
AND constraint_name = 'fk_opex_currency'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE opex
|
||||||
|
ADD CONSTRAINT fk_opex_currency FOREIGN KEY (currency_id)
|
||||||
|
REFERENCES currency (id) ON DELETE RESTRICT;
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
ALTER TABLE capex
|
||||||
|
DROP COLUMN IF EXISTS currency_code;
|
||||||
|
ALTER TABLE opex
|
||||||
|
DROP COLUMN IF EXISTS currency_code;
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
-- CalMiner Migration: add currency and unit metadata columns
|
|
||||||
-- Date: 2025-10-21
|
|
||||||
-- Purpose: align persisted schema with API changes introducing currency selection for
|
|
||||||
-- CAPEX/OPEX costs and unit selection for consumption/production records.
|
|
||||||
|
|
||||||
BEGIN;
|
|
||||||
|
|
||||||
-- CAPEX / OPEX
|
|
||||||
ALTER TABLE capex
|
|
||||||
ADD COLUMN currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
|
|
||||||
|
|
||||||
ALTER TABLE opex
|
|
||||||
ADD COLUMN currency_code VARCHAR(3) NOT NULL DEFAULT 'USD';
|
|
||||||
|
|
||||||
-- Consumption tracking
|
|
||||||
ALTER TABLE consumption
|
|
||||||
ADD COLUMN unit_name VARCHAR(64);
|
|
||||||
|
|
||||||
ALTER TABLE consumption
|
|
||||||
ADD COLUMN unit_symbol VARCHAR(16);
|
|
||||||
|
|
||||||
-- Production output
|
|
||||||
ALTER TABLE production_output
|
|
||||||
ADD COLUMN unit_name VARCHAR(64);
|
|
||||||
|
|
||||||
ALTER TABLE production_output
|
|
||||||
ADD COLUMN unit_symbol VARCHAR(16);
|
|
||||||
|
|
||||||
COMMIT;
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
-- Migration: create currency referential table and convert capex/opex to FK
|
|
||||||
-- Date: 2025-10-22
|
|
||||||
|
|
||||||
BEGIN;
|
|
||||||
|
|
||||||
-- 1) Create currency table
|
|
||||||
CREATE TABLE IF NOT EXISTS currency (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
code VARCHAR(3) NOT NULL UNIQUE,
|
|
||||||
name VARCHAR(128) NOT NULL,
|
|
||||||
symbol VARCHAR(8),
|
|
||||||
is_active BOOLEAN NOT NULL DEFAULT TRUE
|
|
||||||
);
|
|
||||||
|
|
||||||
-- 2) Seed some common currencies (idempotent)
|
|
||||||
INSERT INTO currency (code, name, symbol, is_active)
|
|
||||||
SELECT * FROM (VALUES
|
|
||||||
('USD','United States Dollar','$',TRUE),
|
|
||||||
('EUR','Euro','€',TRUE),
|
|
||||||
('CLP','Chilean Peso','CLP$',TRUE),
|
|
||||||
('RMB','Chinese Yuan','¥',TRUE),
|
|
||||||
('GBP','British Pound','£',TRUE),
|
|
||||||
('CAD','Canadian Dollar','C$',TRUE),
|
|
||||||
('AUD','Australian Dollar','A$',TRUE)
|
|
||||||
) AS v(code,name,symbol,is_active)
|
|
||||||
ON CONFLICT (code) DO NOTHING;
|
|
||||||
|
|
||||||
-- 3) Add currency_id columns to capex and opex with nullable true to allow backfill
|
|
||||||
ALTER TABLE capex ADD COLUMN IF NOT EXISTS currency_id INTEGER;
|
|
||||||
ALTER TABLE opex ADD COLUMN IF NOT EXISTS currency_id INTEGER;
|
|
||||||
|
|
||||||
-- 4) Backfill currency_id using existing currency_code column where present
|
|
||||||
-- Only do this if the currency_code column exists
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='capex' AND column_name='currency_code') THEN
|
|
||||||
UPDATE capex SET currency_id = (
|
|
||||||
SELECT id FROM currency WHERE code = capex.currency_code LIMIT 1
|
|
||||||
);
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='opex' AND column_name='currency_code') THEN
|
|
||||||
UPDATE opex SET currency_id = (
|
|
||||||
SELECT id FROM currency WHERE code = opex.currency_code LIMIT 1
|
|
||||||
);
|
|
||||||
END IF;
|
|
||||||
END$$;
|
|
||||||
|
|
||||||
-- 5) Make currency_id non-nullable and add FK constraint, default to USD where missing
|
|
||||||
UPDATE currency SET is_active = TRUE WHERE code = 'USD';
|
|
||||||
|
|
||||||
-- Ensure any NULL currency_id uses USD
|
|
||||||
UPDATE capex SET currency_id = (SELECT id FROM currency WHERE code='USD') WHERE currency_id IS NULL;
|
|
||||||
UPDATE opex SET currency_id = (SELECT id FROM currency WHERE code='USD') WHERE currency_id IS NULL;
|
|
||||||
|
|
||||||
ALTER TABLE capex ALTER COLUMN currency_id SET NOT NULL;
|
|
||||||
ALTER TABLE opex ALTER COLUMN currency_id SET NOT NULL;
|
|
||||||
|
|
||||||
ALTER TABLE capex ADD CONSTRAINT fk_capex_currency FOREIGN KEY (currency_id) REFERENCES currency(id);
|
|
||||||
ALTER TABLE opex ADD CONSTRAINT fk_opex_currency FOREIGN KEY (currency_id) REFERENCES currency(id);
|
|
||||||
|
|
||||||
-- 6) Optionally drop old currency_code columns if they exist
|
|
||||||
ALTER TABLE capex DROP COLUMN IF EXISTS currency_code;
|
|
||||||
ALTER TABLE opex DROP COLUMN IF EXISTS currency_code;
|
|
||||||
|
|
||||||
COMMIT;
|
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
-- Migration: Create application_setting table for configurable application options
|
||||||
|
-- Date: 2025-10-25
|
||||||
|
-- Description: Introduces persistent storage for application-level settings such as theme colors.
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS application_setting (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
key VARCHAR(128) NOT NULL UNIQUE,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
value_type VARCHAR(32) NOT NULL DEFAULT 'string',
|
||||||
|
category VARCHAR(32) NOT NULL DEFAULT 'general',
|
||||||
|
description TEXT,
|
||||||
|
is_editable BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS ux_application_setting_key
|
||||||
|
ON application_setting (key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS ix_application_setting_category
|
||||||
|
ON application_setting (category);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
162
scripts/seed_data.py
Normal file
162
scripts/seed_data.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
"""Seed baseline data for CalMiner in an idempotent manner.
|
||||||
|
|
||||||
|
Usage examples
|
||||||
|
--------------
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Use existing environment variables (or load from setup_test.env.example)
|
||||||
|
python scripts/seed_data.py --currencies --units --defaults
|
||||||
|
|
||||||
|
# Dry-run to preview actions
|
||||||
|
python scripts/seed_data.py --currencies --dry-run
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from typing import Iterable, Optional
|
||||||
|
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2 import errors
|
||||||
|
from psycopg2.extras import execute_values
|
||||||
|
|
||||||
|
from scripts.setup_database import DatabaseConfig
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
CURRENCY_SEEDS = (
|
||||||
|
("USD", "United States Dollar", "USD$", True),
|
||||||
|
("EUR", "Euro", "EUR", True),
|
||||||
|
("CLP", "Chilean Peso", "CLP$", True),
|
||||||
|
("RMB", "Chinese Yuan", "RMB", True),
|
||||||
|
("GBP", "British Pound", "GBP", True),
|
||||||
|
("CAD", "Canadian Dollar", "CAD$", True),
|
||||||
|
("AUD", "Australian Dollar", "AUD$", True),
|
||||||
|
)
|
||||||
|
|
||||||
|
MEASUREMENT_UNIT_SEEDS = (
|
||||||
|
("tonnes", "Tonnes", "t", "mass", True),
|
||||||
|
("kilograms", "Kilograms", "kg", "mass", True),
|
||||||
|
("pounds", "Pounds", "lb", "mass", True),
|
||||||
|
("liters", "Liters", "L", "volume", True),
|
||||||
|
("cubic_meters", "Cubic Meters", "m3", "volume", True),
|
||||||
|
("kilowatt_hours", "Kilowatt Hours", "kWh", "energy", True),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args() -> argparse.Namespace:
|
||||||
|
parser = argparse.ArgumentParser(description="Seed baseline CalMiner data")
|
||||||
|
parser.add_argument("--currencies", action="store_true", help="Seed currency table")
|
||||||
|
parser.add_argument("--units", action="store_true", help="Seed unit table")
|
||||||
|
parser.add_argument("--defaults", action="store_true", help="Seed default records")
|
||||||
|
parser.add_argument("--dry-run", action="store_true", help="Print actions without executing")
|
||||||
|
parser.add_argument(
|
||||||
|
"--verbose", "-v", action="count", default=0, help="Increase logging verbosity"
|
||||||
|
)
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_logging(args: argparse.Namespace) -> None:
|
||||||
|
level = logging.WARNING - (10 * min(args.verbose, 2))
|
||||||
|
logging.basicConfig(level=max(level, logging.INFO), format="%(levelname)s %(message)s")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
args = parse_args()
|
||||||
|
run_with_namespace(args)
|
||||||
|
|
||||||
|
|
||||||
|
def run_with_namespace(
|
||||||
|
args: argparse.Namespace,
|
||||||
|
*,
|
||||||
|
config: Optional[DatabaseConfig] = None,
|
||||||
|
) -> None:
|
||||||
|
_configure_logging(args)
|
||||||
|
|
||||||
|
if not any((args.currencies, args.units, args.defaults)):
|
||||||
|
logger.info("No seeding options provided; exiting")
|
||||||
|
return
|
||||||
|
|
||||||
|
config = config or DatabaseConfig.from_env()
|
||||||
|
with psycopg2.connect(config.application_dsn()) as conn:
|
||||||
|
conn.autocommit = True
|
||||||
|
with conn.cursor() as cursor:
|
||||||
|
if args.currencies:
|
||||||
|
_seed_currencies(cursor, dry_run=args.dry_run)
|
||||||
|
if args.units:
|
||||||
|
_seed_units(cursor, dry_run=args.dry_run)
|
||||||
|
if args.defaults:
|
||||||
|
_seed_defaults(cursor, dry_run=args.dry_run)
|
||||||
|
|
||||||
|
|
||||||
|
def _seed_currencies(cursor, *, dry_run: bool) -> None:
|
||||||
|
logger.info("Seeding currency table (%d rows)", len(CURRENCY_SEEDS))
|
||||||
|
if dry_run:
|
||||||
|
for code, name, symbol, active in CURRENCY_SEEDS:
|
||||||
|
logger.info("Dry run: would upsert currency %s (%s)", code, name)
|
||||||
|
return
|
||||||
|
|
||||||
|
execute_values(
|
||||||
|
cursor,
|
||||||
|
"""
|
||||||
|
INSERT INTO currency (code, name, symbol, is_active)
|
||||||
|
VALUES %s
|
||||||
|
ON CONFLICT (code) DO UPDATE
|
||||||
|
SET name = EXCLUDED.name,
|
||||||
|
symbol = EXCLUDED.symbol,
|
||||||
|
is_active = EXCLUDED.is_active
|
||||||
|
""",
|
||||||
|
CURRENCY_SEEDS,
|
||||||
|
)
|
||||||
|
logger.info("Currency seed complete")
|
||||||
|
|
||||||
|
|
||||||
|
def _seed_units(cursor, *, dry_run: bool) -> None:
|
||||||
|
total = len(MEASUREMENT_UNIT_SEEDS)
|
||||||
|
logger.info("Seeding measurement_unit table (%d rows)", total)
|
||||||
|
if dry_run:
|
||||||
|
for code, name, symbol, unit_type, _ in MEASUREMENT_UNIT_SEEDS:
|
||||||
|
logger.info(
|
||||||
|
"Dry run: would upsert measurement unit %s (%s - %s)",
|
||||||
|
code,
|
||||||
|
name,
|
||||||
|
unit_type,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
execute_values(
|
||||||
|
cursor,
|
||||||
|
"""
|
||||||
|
INSERT INTO measurement_unit (code, name, symbol, unit_type, is_active)
|
||||||
|
VALUES %s
|
||||||
|
ON CONFLICT (code) DO UPDATE
|
||||||
|
SET name = EXCLUDED.name,
|
||||||
|
symbol = EXCLUDED.symbol,
|
||||||
|
unit_type = EXCLUDED.unit_type,
|
||||||
|
is_active = EXCLUDED.is_active
|
||||||
|
""",
|
||||||
|
MEASUREMENT_UNIT_SEEDS,
|
||||||
|
)
|
||||||
|
except errors.UndefinedTable:
|
||||||
|
logger.warning(
|
||||||
|
"measurement_unit table does not exist; skipping unit seeding."
|
||||||
|
)
|
||||||
|
cursor.connection.rollback()
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Measurement unit seed complete")
|
||||||
|
|
||||||
|
|
||||||
|
def _seed_defaults(cursor, *, dry_run: bool) -> None:
|
||||||
|
logger.info("Seeding default records - not yet implemented")
|
||||||
|
if dry_run:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
1188
scripts/setup_database.py
Normal file
1188
scripts/setup_database.py
Normal file
File diff suppressed because it is too large
Load Diff
208
services/settings.py
Normal file
208
services/settings.py
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from typing import Dict, Mapping
|
||||||
|
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from models.application_setting import ApplicationSetting
|
||||||
|
|
||||||
|
CSS_COLOR_CATEGORY = "theme"
|
||||||
|
CSS_COLOR_VALUE_TYPE = "color"
|
||||||
|
CSS_ENV_PREFIX = "CALMINER_THEME_"
|
||||||
|
|
||||||
|
CSS_COLOR_DEFAULTS: Dict[str, str] = {
|
||||||
|
"--color-background": "#f4f5f7",
|
||||||
|
"--color-surface": "#ffffff",
|
||||||
|
"--color-text-primary": "#2a1f33",
|
||||||
|
"--color-text-secondary": "#624769",
|
||||||
|
"--color-text-muted": "#64748b",
|
||||||
|
"--color-text-subtle": "#94a3b8",
|
||||||
|
"--color-text-invert": "#ffffff",
|
||||||
|
"--color-text-dark": "#0f172a",
|
||||||
|
"--color-text-strong": "#111827",
|
||||||
|
"--color-primary": "#5f320d",
|
||||||
|
"--color-primary-strong": "#7e4c13",
|
||||||
|
"--color-primary-stronger": "#837c15",
|
||||||
|
"--color-accent": "#bff838",
|
||||||
|
"--color-border": "#e2e8f0",
|
||||||
|
"--color-border-strong": "#cbd5e1",
|
||||||
|
"--color-highlight": "#eef2ff",
|
||||||
|
"--color-panel-shadow": "rgba(15, 23, 42, 0.08)",
|
||||||
|
"--color-panel-shadow-deep": "rgba(15, 23, 42, 0.12)",
|
||||||
|
"--color-surface-alt": "#f8fafc",
|
||||||
|
"--color-success": "#047857",
|
||||||
|
"--color-error": "#b91c1c",
|
||||||
|
}
|
||||||
|
|
||||||
|
_COLOR_VALUE_PATTERN = re.compile(
|
||||||
|
r"^(#([0-9a-fA-F]{3}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})|rgba?\([^)]+\)|hsla?\([^)]+\))$",
|
||||||
|
re.IGNORECASE,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_css_color_settings(db: Session) -> Dict[str, ApplicationSetting]:
|
||||||
|
"""Ensure the CSS color defaults exist in the settings table."""
|
||||||
|
|
||||||
|
existing = (
|
||||||
|
db.query(ApplicationSetting)
|
||||||
|
.filter(ApplicationSetting.key.in_(CSS_COLOR_DEFAULTS.keys()))
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
by_key = {setting.key: setting for setting in existing}
|
||||||
|
|
||||||
|
created = False
|
||||||
|
for key, default_value in CSS_COLOR_DEFAULTS.items():
|
||||||
|
if key in by_key:
|
||||||
|
continue
|
||||||
|
setting = ApplicationSetting(
|
||||||
|
key=key,
|
||||||
|
value=default_value,
|
||||||
|
value_type=CSS_COLOR_VALUE_TYPE,
|
||||||
|
category=CSS_COLOR_CATEGORY,
|
||||||
|
description=f"CSS variable {key}",
|
||||||
|
is_editable=True,
|
||||||
|
)
|
||||||
|
db.add(setting)
|
||||||
|
by_key[key] = setting
|
||||||
|
created = True
|
||||||
|
|
||||||
|
if created:
|
||||||
|
db.commit()
|
||||||
|
for key, setting in by_key.items():
|
||||||
|
db.refresh(setting)
|
||||||
|
|
||||||
|
return by_key
|
||||||
|
|
||||||
|
|
||||||
|
def get_css_color_settings(db: Session) -> Dict[str, str]:
|
||||||
|
"""Return CSS color variables, filling missing values with defaults."""
|
||||||
|
|
||||||
|
settings = ensure_css_color_settings(db)
|
||||||
|
values: Dict[str, str] = {
|
||||||
|
key: settings[key].value if key in settings else default
|
||||||
|
for key, default in CSS_COLOR_DEFAULTS.items()
|
||||||
|
}
|
||||||
|
|
||||||
|
env_overrides = read_css_color_env_overrides(os.environ)
|
||||||
|
if env_overrides:
|
||||||
|
values.update(env_overrides)
|
||||||
|
|
||||||
|
return values
|
||||||
|
|
||||||
|
|
||||||
|
def update_css_color_settings(db: Session, updates: Mapping[str, str]) -> Dict[str, str]:
|
||||||
|
"""Persist provided CSS color overrides and return the final values."""
|
||||||
|
|
||||||
|
if not updates:
|
||||||
|
return get_css_color_settings(db)
|
||||||
|
|
||||||
|
invalid_keys = sorted(set(updates.keys()) - set(CSS_COLOR_DEFAULTS.keys()))
|
||||||
|
if invalid_keys:
|
||||||
|
invalid_list = ", ".join(invalid_keys)
|
||||||
|
raise ValueError(f"Unsupported CSS variables: {invalid_list}")
|
||||||
|
|
||||||
|
normalized: Dict[str, str] = {}
|
||||||
|
for key, value in updates.items():
|
||||||
|
normalized[key] = _normalize_color_value(value)
|
||||||
|
|
||||||
|
settings = ensure_css_color_settings(db)
|
||||||
|
changed = False
|
||||||
|
|
||||||
|
for key, value in normalized.items():
|
||||||
|
setting = settings[key]
|
||||||
|
if setting.value != value:
|
||||||
|
setting.value = value
|
||||||
|
changed = True
|
||||||
|
if setting.value_type != CSS_COLOR_VALUE_TYPE:
|
||||||
|
setting.value_type = CSS_COLOR_VALUE_TYPE
|
||||||
|
changed = True
|
||||||
|
if setting.category != CSS_COLOR_CATEGORY:
|
||||||
|
setting.category = CSS_COLOR_CATEGORY
|
||||||
|
changed = True
|
||||||
|
if not setting.is_editable:
|
||||||
|
setting.is_editable = True
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
if changed:
|
||||||
|
db.commit()
|
||||||
|
for key in normalized.keys():
|
||||||
|
db.refresh(settings[key])
|
||||||
|
|
||||||
|
return get_css_color_settings(db)
|
||||||
|
|
||||||
|
|
||||||
|
def read_css_color_env_overrides(
|
||||||
|
env: Mapping[str, str] | None = None,
|
||||||
|
) -> Dict[str, str]:
|
||||||
|
"""Return validated CSS overrides sourced from environment variables."""
|
||||||
|
|
||||||
|
if env is None:
|
||||||
|
env = os.environ
|
||||||
|
|
||||||
|
overrides: Dict[str, str] = {}
|
||||||
|
for css_key in CSS_COLOR_DEFAULTS.keys():
|
||||||
|
env_name = css_key_to_env_var(css_key)
|
||||||
|
raw_value = env.get(env_name)
|
||||||
|
if raw_value is None:
|
||||||
|
continue
|
||||||
|
overrides[css_key] = _normalize_color_value(raw_value)
|
||||||
|
|
||||||
|
return overrides
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_color_value(value: str) -> str:
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError("Color value must be a string")
|
||||||
|
trimmed = value.strip()
|
||||||
|
if not trimmed:
|
||||||
|
raise ValueError("Color value cannot be empty")
|
||||||
|
if not _COLOR_VALUE_PATTERN.match(trimmed):
|
||||||
|
raise ValueError(
|
||||||
|
"Color value must be a hex code or an rgb/rgba/hsl/hsla expression"
|
||||||
|
)
|
||||||
|
_validate_functional_color(trimmed)
|
||||||
|
return trimmed
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_functional_color(value: str) -> None:
|
||||||
|
lowered = value.lower()
|
||||||
|
if lowered.startswith("rgb(") or lowered.startswith("hsl("):
|
||||||
|
_ensure_component_count(value, expected=3)
|
||||||
|
elif lowered.startswith("rgba(") or lowered.startswith("hsla("):
|
||||||
|
_ensure_component_count(value, expected=4)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_component_count(value: str, expected: int) -> None:
|
||||||
|
if not value.endswith(")"):
|
||||||
|
raise ValueError("Color function expressions must end with a closing parenthesis")
|
||||||
|
inner = value[value.index("(") + 1 : -1]
|
||||||
|
parts = [segment.strip() for segment in inner.split(",")]
|
||||||
|
if len(parts) != expected:
|
||||||
|
raise ValueError(
|
||||||
|
"Color function expressions must provide the expected number of components"
|
||||||
|
)
|
||||||
|
if any(not component for component in parts):
|
||||||
|
raise ValueError("Color function components cannot be empty")
|
||||||
|
|
||||||
|
|
||||||
|
def css_key_to_env_var(css_key: str) -> str:
|
||||||
|
sanitized = css_key.lstrip("-").replace("-", "_").upper()
|
||||||
|
return f"{CSS_ENV_PREFIX}{sanitized}"
|
||||||
|
|
||||||
|
|
||||||
|
def list_css_env_override_rows(
|
||||||
|
env: Mapping[str, str] | None = None,
|
||||||
|
) -> list[Dict[str, str]]:
|
||||||
|
overrides = read_css_color_env_overrides(env)
|
||||||
|
rows: list[Dict[str, str]] = []
|
||||||
|
for css_key, value in overrides.items():
|
||||||
|
rows.append(
|
||||||
|
{
|
||||||
|
"css_key": css_key,
|
||||||
|
"env_var": css_key_to_env_var(css_key),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return rows
|
||||||
@@ -117,6 +117,37 @@ body {
  gap: 0.5rem;
}

.sidebar-section {
  display: flex;
  flex-direction: column;
  gap: 0.35rem;
}

.sidebar-section + .sidebar-section {
  margin-top: 1.4rem;
}

.sidebar-section-label {
  font-size: 0.75rem;
  font-weight: 600;
  letter-spacing: 0.06em;
  text-transform: uppercase;
  color: rgba(255, 255, 255, 0.52);
  padding: 0 1rem;
}

.sidebar-section-links {
  display: flex;
  flex-direction: column;
  gap: 0.25rem;
}

.sidebar-link-block {
  display: flex;
  flex-direction: column;
  gap: 0.2rem;
}

.sidebar-link {
  display: inline-flex;
  align-items: center;
@@ -142,6 +173,39 @@ body {
  box-shadow: inset 0 0 0 1px rgba(255, 255, 255, 0.25);
}

.sidebar-sublinks {
  display: flex;
  flex-direction: column;
  gap: 0.2rem;
  padding-left: 1.75rem;
}

.sidebar-sublink {
  display: inline-flex;
  align-items: center;
  gap: 0.5rem;
  color: rgba(255, 255, 255, 0.74);
  font-weight: 500;
  font-size: 0.9rem;
  text-decoration: none;
  padding: 0.35rem 0.75rem;
  border-radius: 8px;
  transition: background 0.2s ease, color 0.2s ease, transform 0.2s ease;
}

.sidebar-sublink:hover,
.sidebar-sublink:focus {
  background: rgba(148, 197, 255, 0.18);
  color: var(--color-text-invert);
  transform: translateX(3px);
}

.sidebar-sublink.is-active {
  background: rgba(148, 197, 255, 0.28);
  color: var(--color-text-invert);
  box-shadow: inset 0 0 0 1px rgba(255, 255, 255, 0.18);
}

.app-main {
  background-color: var(--color-background);
  display: flex;
@@ -185,6 +249,159 @@ body {
  align-items: center;
}

.page-header {
  display: flex;
  align-items: flex-start;
  justify-content: space-between;
  gap: 1.5rem;
  margin-bottom: 2rem;
}

.page-subtitle {
  margin-top: 0.35rem;
  color: var(--color-text-muted);
  font-size: 0.95rem;
}

.settings-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
  gap: 1.5rem;
  margin-bottom: 2rem;
}

.settings-card {
  background: var(--color-surface);
  border-radius: 12px;
  padding: 1.5rem;
  box-shadow: 0 4px 14px var(--color-panel-shadow);
  display: flex;
  flex-direction: column;
  gap: 0.75rem;
}

.settings-card h2 {
  margin: 0;
  font-size: 1.2rem;
}

.settings-card p {
  margin: 0;
  color: var(--color-text-muted);
}

.settings-card-note {
  font-size: 0.85rem;
  color: var(--color-text-subtle);
}

.color-form-grid {
  max-width: none;
  grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
}

.color-form-field {
  background: var(--color-surface-alt);
  border: 1px solid var(--color-border);
  border-radius: 10px;
  padding: var(--space-sm);
  box-shadow: inset 0 1px 2px rgba(15, 23, 42, 0.08);
  gap: var(--space-sm);
}

.color-form-field.is-env-override {
  background: rgba(191, 248, 56, 0.12);
  border-color: var(--color-accent);
}

.color-field-header {
  display: flex;
  justify-content: space-between;
  gap: var(--space-sm);
  font-weight: 600;
  color: var(--color-text-strong);
  font-family: "Fira Code", "Consolas", "Courier New", monospace;
  font-size: 0.85rem;
}

.color-field-default {
  color: var(--color-text-muted);
  font-weight: 500;
}

.color-field-helper {
  font-size: 0.8rem;
  color: var(--color-text-subtle);
}

.color-env-flag {
  font-size: 0.78rem;
  font-weight: 600;
  color: var(--color-accent);
  text-transform: uppercase;
  letter-spacing: 0.04em;
}

.color-input-row {
  display: flex;
  align-items: center;
  gap: var(--space-sm);
}

.color-value-input {
  font-family: "Fira Code", "Consolas", "Courier New", monospace;
}

.color-value-input[disabled] {
  background-color: rgba(148, 197, 255, 0.16);
  cursor: not-allowed;
}

.color-preview {
  width: 32px;
  height: 32px;
  border-radius: 8px;
  border: 1px solid var(--color-border-strong);
  box-shadow: inset 0 0 0 1px rgba(15, 23, 42, 0.05);
}

.env-overrides-table table {
  width: 100%;
  border-collapse: collapse;
}

.env-overrides-table th,
.env-overrides-table td {
  padding: 0.65rem 0.75rem;
  text-align: left;
  border-bottom: 1px solid var(--color-border);
}

.env-overrides-table code {
  font-family: "Fira Code", "Consolas", "Courier New", monospace;
  font-size: 0.85rem;
}

.button-link {
  display: inline-flex;
  align-items: center;
  justify-content: center;
  width: fit-content;
  padding: 0.55rem 1.2rem;
  border-radius: 999px;
  font-weight: 600;
  text-decoration: none;
  background: var(--color-primary);
  color: var(--color-text-invert);
  transition: transform 0.2s ease, box-shadow 0.2s ease;
}

.button-link:hover,
.button-link:focus {
  transform: translateY(-1px);
  box-shadow: 0 8px 18px var(--color-panel-shadow);
}

.dashboard-metrics-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
537
static/js/currencies.js
Normal file
@@ -0,0 +1,537 @@
|
|||||||
|
document.addEventListener("DOMContentLoaded", () => {
|
||||||
|
const dataElement = document.getElementById("currencies-data");
|
||||||
|
const editorSection = document.getElementById("currencies-editor");
|
||||||
|
const tableBody = document.getElementById("currencies-table-body");
|
||||||
|
const tableEmptyState = document.getElementById("currencies-table-empty");
|
||||||
|
const metrics = {
|
||||||
|
total: document.getElementById("currency-metric-total"),
|
||||||
|
active: document.getElementById("currency-metric-active"),
|
||||||
|
inactive: document.getElementById("currency-metric-inactive"),
|
||||||
|
};
|
||||||
|
|
||||||
|
const form = document.getElementById("currency-form");
|
||||||
|
const existingSelect = document.getElementById("currency-form-existing");
|
||||||
|
const codeInput = document.getElementById("currency-form-code");
|
||||||
|
const nameInput = document.getElementById("currency-form-name");
|
||||||
|
const symbolInput = document.getElementById("currency-form-symbol");
|
||||||
|
const statusSelect = document.getElementById("currency-form-status");
|
||||||
|
const resetButton = document.getElementById("currency-form-reset");
|
||||||
|
const feedbackElement = document.getElementById("currency-form-feedback");
|
||||||
|
|
||||||
|
const saveButton = form ? form.querySelector("button[type='submit']") : null;
|
||||||
|
|
||||||
|
const uppercaseCode = (value) =>
|
||||||
|
(value || "").toString().trim().toUpperCase();
|
||||||
|
const normalizeSymbol = (value) => {
|
||||||
|
if (value === undefined || value === null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const trimmed = String(value).trim();
|
||||||
|
return trimmed ? trimmed : null;
|
||||||
|
};
|
||||||
|
|
||||||
|
const normalizeApiBase = (value) => {
|
||||||
|
if (!value || typeof value !== "string") {
|
||||||
|
return "/api/currencies";
|
||||||
|
}
|
||||||
|
return value.endsWith("/") ? value.slice(0, -1) : value;
|
||||||
|
};
|
||||||
|
|
||||||
|
let currencies = [];
|
||||||
|
let apiBase = "/api/currencies";
|
||||||
|
let defaultCurrencyCode = "USD";
|
||||||
|
|
||||||
|
const buildCurrencyRecord = (record) => {
|
||||||
|
if (!record || typeof record !== "object") {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const code = uppercaseCode(record.code);
|
||||||
|
return {
|
||||||
|
id: record.id ?? null,
|
||||||
|
code,
|
||||||
|
name: record.name || "",
|
||||||
|
symbol: record.symbol || "",
|
||||||
|
is_active: Boolean(record.is_active),
|
||||||
|
is_default: code === defaultCurrencyCode,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
const findCurrencyIndex = (code) => {
|
||||||
|
return currencies.findIndex((item) => item.code === code);
|
||||||
|
};
|
||||||
|
|
||||||
|
const upsertCurrency = (record) => {
|
||||||
|
const normalized = buildCurrencyRecord(record);
|
||||||
|
if (!normalized) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const existingIndex = findCurrencyIndex(normalized.code);
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
currencies[existingIndex] = normalized;
|
||||||
|
} else {
|
||||||
|
currencies.push(normalized);
|
||||||
|
}
|
||||||
|
currencies.sort((a, b) => a.code.localeCompare(b.code));
|
||||||
|
return normalized;
|
||||||
|
};
|
||||||
|
|
||||||
|
const replaceCurrencyList = (records) => {
|
||||||
|
if (!Array.isArray(records)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
currencies = records
|
||||||
|
.map((record) => buildCurrencyRecord(record))
|
||||||
|
.filter((record) => record !== null)
|
||||||
|
.sort((a, b) => a.code.localeCompare(b.code));
|
||||||
|
};
|
||||||
|
|
||||||
|
const applyPayload = () => {
|
||||||
|
if (!dataElement) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(dataElement.textContent || "{}");
|
||||||
|
if (parsed && typeof parsed === "object") {
|
||||||
|
if (parsed.default_currency_code) {
|
||||||
|
defaultCurrencyCode = uppercaseCode(parsed.default_currency_code);
|
||||||
|
}
|
||||||
|
if (parsed.currency_api_base) {
|
||||||
|
apiBase = normalizeApiBase(parsed.currency_api_base);
|
||||||
|
}
|
||||||
|
if (Array.isArray(parsed.currencies)) {
|
||||||
|
replaceCurrencyList(parsed.currencies);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Unable to parse currencies payload", error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const showFeedback = (message, type = "success") => {
|
||||||
|
if (!feedbackElement) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
feedbackElement.textContent = message;
|
||||||
|
feedbackElement.classList.remove("hidden", "success", "error");
|
||||||
|
feedbackElement.classList.add(type);
|
||||||
|
};
|
||||||
|
|
||||||
|
const hideFeedback = () => {
|
||||||
|
if (!feedbackElement) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
feedbackElement.classList.add("hidden");
|
||||||
|
feedbackElement.classList.remove("success", "error");
|
||||||
|
feedbackElement.textContent = "";
|
||||||
|
};
|
||||||
|
|
||||||
|
const setButtonLoading = (button, isLoading) => {
|
||||||
|
if (!button) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
button.disabled = isLoading;
|
||||||
|
button.classList.toggle("is-loading", isLoading);
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateMetrics = () => {
|
||||||
|
const total = currencies.length;
|
||||||
|
const active = currencies.filter((item) => item.is_active).length;
|
||||||
|
const inactive = total - active;
|
||||||
|
if (metrics.total) {
|
||||||
|
metrics.total.textContent = String(total);
|
||||||
|
}
|
||||||
|
if (metrics.active) {
|
||||||
|
metrics.active.textContent = String(active);
|
||||||
|
}
|
||||||
|
if (metrics.inactive) {
|
||||||
|
metrics.inactive.textContent = String(inactive);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const renderExistingOptions = (
|
||||||
|
selectedCode = existingSelect ? existingSelect.value : ""
|
||||||
|
) => {
|
||||||
|
if (!existingSelect) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const placeholder = existingSelect.querySelector("option[value='']");
|
||||||
|
const placeholderClone = placeholder ? placeholder.cloneNode(true) : null;
|
||||||
|
existingSelect.innerHTML = "";
|
||||||
|
if (placeholderClone) {
|
||||||
|
existingSelect.appendChild(placeholderClone);
|
||||||
|
}
|
||||||
|
const fragment = document.createDocumentFragment();
|
||||||
|
currencies.forEach((currency) => {
|
||||||
|
const option = document.createElement("option");
|
||||||
|
option.value = currency.code;
|
||||||
|
option.textContent = currency.name
|
||||||
|
? `${currency.name} (${currency.code})`
|
||||||
|
: currency.code;
|
||||||
|
if (selectedCode === currency.code) {
|
||||||
|
option.selected = true;
|
||||||
|
}
|
||||||
|
fragment.appendChild(option);
|
||||||
|
});
|
||||||
|
existingSelect.appendChild(fragment);
|
||||||
|
if (
|
||||||
|
selectedCode &&
|
||||||
|
!currencies.some((item) => item.code === selectedCode)
|
||||||
|
) {
|
||||||
|
existingSelect.value = "";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const renderTable = () => {
|
||||||
|
if (!tableBody) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
tableBody.innerHTML = "";
|
||||||
|
if (!currencies.length) {
|
||||||
|
if (tableEmptyState) {
|
||||||
|
tableEmptyState.classList.remove("hidden");
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (tableEmptyState) {
|
||||||
|
tableEmptyState.classList.add("hidden");
|
||||||
|
}
|
||||||
|
const fragment = document.createDocumentFragment();
|
||||||
|
currencies.forEach((currency) => {
|
||||||
|
const row = document.createElement("tr");
|
||||||
|
|
||||||
|
const codeCell = document.createElement("td");
|
||||||
|
codeCell.textContent = currency.code;
|
||||||
|
row.appendChild(codeCell);
|
||||||
|
|
||||||
|
const nameCell = document.createElement("td");
|
||||||
|
nameCell.textContent = currency.name || "—";
|
||||||
|
row.appendChild(nameCell);
|
||||||
|
|
||||||
|
const symbolCell = document.createElement("td");
|
||||||
|
symbolCell.textContent = currency.symbol || "—";
|
||||||
|
row.appendChild(symbolCell);
|
||||||
|
|
||||||
|
const statusCell = document.createElement("td");
|
||||||
|
statusCell.textContent = currency.is_active ? "Active" : "Inactive";
|
||||||
|
if (currency.is_default) {
|
||||||
|
statusCell.textContent += " (Default)";
|
||||||
|
}
|
||||||
|
row.appendChild(statusCell);
|
||||||
|
|
||||||
|
const actionsCell = document.createElement("td");
|
||||||
|
const editButton = document.createElement("button");
|
||||||
|
editButton.type = "button";
|
||||||
|
editButton.className = "btn";
|
||||||
|
editButton.dataset.action = "edit";
|
||||||
|
editButton.dataset.code = currency.code;
|
||||||
|
editButton.textContent = "Edit";
|
||||||
|
editButton.style.marginRight = "0.5rem";
|
||||||
|
|
||||||
|
const toggleButton = document.createElement("button");
|
||||||
|
toggleButton.type = "button";
|
||||||
|
toggleButton.className = "btn";
|
||||||
|
toggleButton.dataset.action = "toggle";
|
||||||
|
toggleButton.dataset.code = currency.code;
|
||||||
|
toggleButton.textContent = currency.is_active ? "Deactivate" : "Activate";
|
||||||
|
if (currency.is_default && currency.is_active) {
|
||||||
|
toggleButton.disabled = true;
|
||||||
|
toggleButton.title = "The default currency must remain active.";
|
||||||
|
}
|
||||||
|
|
||||||
|
actionsCell.appendChild(editButton);
|
||||||
|
actionsCell.appendChild(toggleButton);
|
||||||
|
|
||||||
|
row.appendChild(actionsCell);
|
||||||
|
fragment.appendChild(row);
|
||||||
|
});
|
||||||
|
tableBody.appendChild(fragment);
|
||||||
|
};
|
||||||
|
|
||||||
|
const refreshUI = (selectedCode) => {
|
||||||
|
currencies.sort((a, b) => a.code.localeCompare(b.code));
|
||||||
|
renderTable();
|
||||||
|
renderExistingOptions(selectedCode);
|
||||||
|
updateMetrics();
|
||||||
|
};
|
||||||
|
|
||||||
|
const findCurrency = (code) =>
|
||||||
|
currencies.find((item) => item.code === code) || null;
|
||||||
|
|
||||||
|
const setFormForCurrency = (currency) => {
|
||||||
|
if (!form || !codeInput || !nameInput || !symbolInput || !statusSelect) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!currency) {
|
||||||
|
form.reset();
|
||||||
|
if (existingSelect) {
|
||||||
|
existingSelect.value = "";
|
||||||
|
}
|
||||||
|
codeInput.readOnly = false;
|
||||||
|
codeInput.value = "";
|
||||||
|
nameInput.value = "";
|
||||||
|
symbolInput.value = "";
|
||||||
|
statusSelect.disabled = false;
|
||||||
|
statusSelect.value = "true";
|
||||||
|
statusSelect.title = "";
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existingSelect) {
|
||||||
|
existingSelect.value = currency.code;
|
||||||
|
}
|
||||||
|
codeInput.readOnly = true;
|
||||||
|
codeInput.value = currency.code;
|
||||||
|
nameInput.value = currency.name || "";
|
||||||
|
symbolInput.value = currency.symbol || "";
|
||||||
|
statusSelect.value = currency.is_active ? "true" : "false";
|
||||||
|
if (currency.is_default) {
|
||||||
|
statusSelect.disabled = true;
|
||||||
|
statusSelect.value = "true";
|
||||||
|
statusSelect.title = "The default currency must remain active.";
|
||||||
|
} else {
|
||||||
|
statusSelect.disabled = false;
|
||||||
|
statusSelect.title = "";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const resetFormState = () => {
|
||||||
|
setFormForCurrency(null);
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseError = async (response, fallbackMessage) => {
|
||||||
|
try {
|
||||||
|
const detail = await response.json();
|
||||||
|
if (detail && typeof detail === "object" && detail.detail) {
|
||||||
|
return detail.detail;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// ignore JSON parse errors
|
||||||
|
}
|
||||||
|
return fallbackMessage;
|
||||||
|
};
|
||||||
|
|
||||||
|
const fetchCurrenciesFromApi = async () => {
|
||||||
|
const url = `${apiBase}/?include_inactive=true`;
|
||||||
|
try {
|
||||||
|
const response = await fetch(url);
|
||||||
|
if (!response.ok) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const list = await response.json();
|
||||||
|
if (Array.isArray(list)) {
|
||||||
|
replaceCurrencyList(list);
|
||||||
|
refreshUI(existingSelect ? existingSelect.value : undefined);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.warn("Unable to refresh currency list", error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSubmit = async (event) => {
|
||||||
|
event.preventDefault();
|
||||||
|
hideFeedback();
|
||||||
|
if (!form || !codeInput || !nameInput || !statusSelect) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const editingCode = existingSelect
|
||||||
|
? uppercaseCode(existingSelect.value)
|
||||||
|
: "";
|
||||||
|
const codeValue = uppercaseCode(codeInput.value);
|
||||||
|
const nameValue = (nameInput.value || "").trim();
|
||||||
|
const symbolValue = normalizeSymbol(symbolInput ? symbolInput.value : "");
|
||||||
|
const isActive = statusSelect.value !== "false";
|
||||||
|
|
||||||
|
if (!nameValue) {
|
||||||
|
showFeedback("Provide a currency name.", "error");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!editingCode) {
|
||||||
|
if (!codeValue || codeValue.length !== 3) {
|
||||||
|
showFeedback("Provide a three-letter currency code.", "error");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = editingCode
|
||||||
|
? {
|
||||||
|
name: nameValue,
|
||||||
|
symbol: symbolValue,
|
||||||
|
is_active: isActive,
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
code: codeValue,
|
||||||
|
name: nameValue,
|
||||||
|
symbol: symbolValue,
|
||||||
|
is_active: isActive,
|
||||||
|
};
|
||||||
|
|
||||||
|
const targetCode = editingCode || codeValue;
|
||||||
|
const url = editingCode
|
||||||
|
? `${apiBase}/${encodeURIComponent(editingCode)}`
|
||||||
|
: `${apiBase}/`;
|
||||||
|
|
||||||
|
setButtonLoading(saveButton, true);
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: editingCode ? "PUT" : "POST",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const message = await parseError(
|
||||||
|
response,
|
||||||
|
editingCode
|
||||||
|
? "Unable to update the currency."
|
||||||
|
: "Unable to create the currency."
|
||||||
|
);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
const updated = upsertCurrency(result);
|
||||||
|
defaultCurrencyCode = uppercaseCode(defaultCurrencyCode);
|
||||||
|
refreshUI(updated ? updated.code : targetCode);
|
||||||
|
|
||||||
|
if (editingCode) {
|
||||||
|
showFeedback("Currency updated successfully.");
|
||||||
|
if (updated) {
|
||||||
|
setFormForCurrency(updated);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
showFeedback("Currency created successfully.");
|
||||||
|
resetFormState();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showFeedback(error.message || "An unexpected error occurred.", "error");
|
||||||
|
} finally {
|
||||||
|
setButtonLoading(saveButton, false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleToggle = async (code, button) => {
|
||||||
|
const record = findCurrency(code);
|
||||||
|
if (!record) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
hideFeedback();
|
||||||
|
const nextState = !record.is_active;
|
||||||
|
const url = `${apiBase}/${encodeURIComponent(code)}/activation`;
|
||||||
|
setButtonLoading(button, true);
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: "PATCH",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body: JSON.stringify({ is_active: nextState }),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const message = await parseError(
|
||||||
|
response,
|
||||||
|
nextState
|
||||||
|
? "Unable to activate the currency."
|
||||||
|
: "Unable to deactivate the currency."
|
||||||
|
);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
const updated = upsertCurrency(result);
|
||||||
|
refreshUI(updated ? updated.code : code);
|
||||||
|
if (existingSelect && existingSelect.value === code && updated) {
|
||||||
|
setFormForCurrency(updated);
|
||||||
|
}
|
||||||
|
const actionMessage = nextState
|
||||||
|
? `Currency ${code} activated.`
|
||||||
|
: `Currency ${code} deactivated.`;
|
||||||
|
showFeedback(actionMessage);
|
||||||
|
} catch (error) {
|
||||||
|
showFeedback(error.message || "An unexpected error occurred.", "error");
|
||||||
|
} finally {
|
||||||
|
setButtonLoading(button, false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleTableClick = (event) => {
|
||||||
|
const button = event.target.closest("button[data-action]");
|
||||||
|
if (!button) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const code = uppercaseCode(button.dataset.code);
|
||||||
|
const action = button.dataset.action;
|
||||||
|
if (!code || !action) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (action === "edit") {
|
||||||
|
const currency = findCurrency(code);
|
||||||
|
if (currency) {
|
||||||
|
setFormForCurrency(currency);
|
||||||
|
hideFeedback();
|
||||||
|
if (nameInput) {
|
||||||
|
nameInput.focus();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (action === "toggle") {
|
||||||
|
handleToggle(code, button);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
applyPayload();
|
||||||
|
if (editorSection && editorSection.dataset.defaultCode) {
|
||||||
|
defaultCurrencyCode = uppercaseCode(editorSection.dataset.defaultCode);
|
||||||
|
currencies = currencies.map((record) => {
|
||||||
|
return record
|
||||||
|
? {
|
||||||
|
...record,
|
||||||
|
is_default: record.code === defaultCurrencyCode,
|
||||||
|
}
|
||||||
|
: record;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
apiBase = normalizeApiBase(apiBase);
|
||||||
|
|
||||||
|
refreshUI();
|
||||||
|
|
||||||
|
if (form) {
|
||||||
|
form.addEventListener("submit", handleSubmit);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existingSelect) {
|
||||||
|
existingSelect.addEventListener("change", (event) => {
|
||||||
|
const selectedCode = uppercaseCode(event.target.value);
|
||||||
|
if (!selectedCode) {
|
||||||
|
hideFeedback();
|
||||||
|
resetFormState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const currency = findCurrency(selectedCode);
|
||||||
|
if (currency) {
|
||||||
|
setFormForCurrency(currency);
|
||||||
|
hideFeedback();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (resetButton) {
|
||||||
|
resetButton.addEventListener("click", (event) => {
|
||||||
|
event.preventDefault();
|
||||||
|
hideFeedback();
|
||||||
|
resetFormState();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (codeInput) {
|
||||||
|
codeInput.addEventListener("input", () => {
|
||||||
|
const value = uppercaseCode(codeInput.value).slice(0, 3);
|
||||||
|
codeInput.value = value;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tableBody) {
|
||||||
|
tableBody.addEventListener("click", handleTableClick);
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchCurrenciesFromApi();
|
||||||
|
});
|
||||||
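For reference, a minimal httpx sketch of the currency API contract this script exercises; the endpoints, methods, and payload shapes mirror the fetch calls above, while the base URL and the EUR example values are assumptions for local testing:

# Sketch only: assumes a CalMiner instance is reachable at the base URL below.
import httpx

with httpx.Client(base_url="http://localhost:8000", timeout=5.0) as client:
    # Create a currency, mirroring the form's POST /api/currencies/ submit path.
    created = client.post(
        "/api/currencies/",
        json={"code": "EUR", "name": "Euro", "symbol": "EUR", "is_active": True},
    )
    created.raise_for_status()

    # Update name, symbol, or status via PUT /api/currencies/{code}.
    client.put(
        "/api/currencies/EUR",
        json={"name": "Euro", "symbol": "EUR", "is_active": True},
    ).raise_for_status()

    # Toggle activation via PATCH /api/currencies/{code}/activation.
    client.patch(
        "/api/currencies/EUR/activation", json={"is_active": False}
    ).raise_for_status()

    # List all currencies, including inactive ones, as the page does on load.
    client.get("/api/currencies/", params={"include_inactive": "true"}).raise_for_status()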
200
static/js/settings.js
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
(function () {
|
||||||
|
const dataScript = document.getElementById("theme-settings-data");
|
||||||
|
const form = document.getElementById("theme-settings-form");
|
||||||
|
const feedbackEl = document.getElementById("theme-settings-feedback");
|
||||||
|
const resetBtn = document.getElementById("theme-settings-reset");
|
||||||
|
const panel = document.getElementById("theme-settings");
|
||||||
|
|
||||||
|
if (!dataScript || !form || !feedbackEl || !panel) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const apiUrl = panel.getAttribute("data-api");
|
||||||
|
if (!apiUrl) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = JSON.parse(dataScript.textContent || "{}");
|
||||||
|
const currentValues = { ...(parsed.variables || {}) };
|
||||||
|
const defaultValues = parsed.defaults || {};
|
||||||
|
let envOverrides = { ...(parsed.envOverrides || {}) };
|
||||||
|
|
||||||
|
const previewElements = new Map();
|
||||||
|
const inputs = Array.from(form.querySelectorAll(".color-value-input"));
|
||||||
|
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
const field = input.closest(".color-form-field");
|
||||||
|
const preview = field ? field.querySelector(".color-preview") : null;
|
||||||
|
if (preview) {
|
||||||
|
previewElements.set(input, preview);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.prototype.hasOwnProperty.call(envOverrides, key)) {
|
||||||
|
const overrideValue = envOverrides[key];
|
||||||
|
input.value = overrideValue;
|
||||||
|
input.disabled = true;
|
||||||
|
input.setAttribute("aria-disabled", "true");
|
||||||
|
input.dataset.envOverride = "true";
|
||||||
|
if (field) {
|
||||||
|
field.classList.add("is-env-override");
|
||||||
|
}
|
||||||
|
if (preview) {
|
||||||
|
preview.style.background = overrideValue;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
input.addEventListener("input", () => {
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = input.value || defaultValues[key] || "";
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
function setFeedback(message, type) {
|
||||||
|
feedbackEl.textContent = message;
|
||||||
|
feedbackEl.classList.remove("hidden", "success", "error");
|
||||||
|
if (type) {
|
||||||
|
feedbackEl.classList.add(type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearFeedback() {
|
||||||
|
feedbackEl.textContent = "";
|
||||||
|
feedbackEl.classList.add("hidden");
|
||||||
|
feedbackEl.classList.remove("success", "error");
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateRootVariables(values) {
|
||||||
|
if (!values) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const root = document.documentElement;
|
||||||
|
Object.entries(values).forEach(([key, value]) => {
|
||||||
|
if (typeof key === "string" && typeof value === "string") {
|
||||||
|
root.style.setProperty(key, value);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function resetTo(source) {
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
if (input.disabled) {
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
const fallback = envOverrides[key] || currentValues[key];
|
||||||
|
if (previewEl && fallback) {
|
||||||
|
previewEl.style.background = fallback;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
||||||
|
input.value = source[key];
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = source[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize previews to current values after page load.
|
||||||
|
resetTo(currentValues);
|
||||||
|
|
||||||
|
resetBtn?.addEventListener("click", () => {
|
||||||
|
resetTo(defaultValues);
|
||||||
|
clearFeedback();
|
||||||
|
setFeedback("Reverted to default values. Submit to save.", "success");
|
||||||
|
});
|
||||||
|
|
||||||
|
form.addEventListener("submit", async (event) => {
|
||||||
|
event.preventDefault();
|
||||||
|
clearFeedback();
|
||||||
|
|
||||||
|
const payload = {};
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
if (input.disabled) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
payload[input.name] = input.value.trim();
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(apiUrl, {
|
||||||
|
method: "PUT",
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ variables: payload }),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
let detail = "Unable to save theme settings.";
|
||||||
|
try {
|
||||||
|
const errorData = await response.json();
|
||||||
|
if (errorData?.detail) {
|
||||||
|
detail = Array.isArray(errorData.detail)
|
||||||
|
? errorData.detail.map((item) => item.msg || item).join("; ")
|
||||||
|
: errorData.detail;
|
||||||
|
}
|
||||||
|
} catch (parseError) {
|
||||||
|
// Ignore JSON parse errors and use default detail message.
|
||||||
|
}
|
||||||
|
setFeedback(detail, "error");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
const variables = data?.variables || {};
|
||||||
|
const responseOverrides = data?.env_overrides || {};
|
||||||
|
|
||||||
|
Object.assign(currentValues, variables);
|
||||||
|
envOverrides = { ...responseOverrides };
|
||||||
|
|
||||||
|
inputs.forEach((input) => {
|
||||||
|
const key = input.name;
|
||||||
|
const field = input.closest(".color-form-field");
|
||||||
|
const previewEl = previewElements.get(input);
|
||||||
|
const isOverride = Object.prototype.hasOwnProperty.call(
|
||||||
|
envOverrides,
|
||||||
|
key,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (isOverride) {
|
||||||
|
const overrideValue = envOverrides[key];
|
||||||
|
input.value = overrideValue;
|
||||||
|
if (!input.disabled) {
|
||||||
|
input.disabled = true;
|
||||||
|
input.setAttribute("aria-disabled", "true");
|
||||||
|
}
|
||||||
|
if (field) {
|
||||||
|
field.classList.add("is-env-override");
|
||||||
|
}
|
||||||
|
if (previewEl) {
|
||||||
|
previewEl.style.background = overrideValue;
|
||||||
|
}
|
||||||
|
} else if (input.disabled) {
|
||||||
|
input.disabled = false;
|
||||||
|
input.removeAttribute("aria-disabled");
|
||||||
|
if (field) {
|
||||||
|
field.classList.remove("is-env-override");
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
previewEl &&
|
||||||
|
Object.prototype.hasOwnProperty.call(variables, key)
|
||||||
|
) {
|
||||||
|
previewEl.style.background = variables[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
updateRootVariables(variables);
|
||||||
|
resetTo(variables);
|
||||||
|
setFeedback("Theme colors updated successfully.", "success");
|
||||||
|
} catch (error) {
|
||||||
|
setFeedback("Network error: unable to save settings.", "error");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
})();
|
||||||
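Likewise, a minimal httpx sketch of the theme endpoint contract this script relies on; the /api/settings/css path comes from the panel's data-api attribute in templates/settings.html, while the base URL and the example color value are assumptions:

# Sketch only: assumes a running CalMiner instance; the color value is illustrative.
import httpx

payload = {"variables": {"--color-primary": "#114455"}}

with httpx.Client(base_url="http://localhost:8000", timeout=5.0) as client:
    response = client.put("/api/settings/css", json=payload)
    response.raise_for_status()
    data = response.json()
    saved_variables = data.get("variables", {})    # persisted CSS variables
    env_overrides = data.get("env_overrides", {})  # read-only, env-managed keys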
131
templates/currencies.html
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% from "partials/components.html" import select_field, feedback, empty_state, table_container with context %}
|
||||||
|
|
||||||
|
{% block title %}Currencies · CalMiner{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<section class="panel" id="currencies-overview">
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Currency Overview</h2>
|
||||||
|
<p class="chart-subtitle">
|
||||||
|
Current availability of currencies for project inputs.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
{% if currency_stats %}
|
||||||
|
<div class="dashboard-metrics-grid">
|
||||||
|
<article class="metric-card">
|
||||||
|
<span class="metric-label">Total Currencies</span>
|
||||||
|
<span class="metric-value" id="currency-metric-total">{{ currency_stats.total }}</span>
|
||||||
|
</article>
|
||||||
|
<article class="metric-card">
|
||||||
|
<span class="metric-label">Active</span>
|
||||||
|
<span class="metric-value" id="currency-metric-active">{{ currency_stats.active }}</span>
|
||||||
|
</article>
|
||||||
|
<article class="metric-card">
|
||||||
|
<span class="metric-label">Inactive</span>
|
||||||
|
<span class="metric-value" id="currency-metric-inactive">{{ currency_stats.inactive }}</span>
|
||||||
|
</article>
|
||||||
|
</div>
|
||||||
|
{% else %} {{ empty_state("currencies-overview-empty", "No currency data
|
||||||
|
available yet.") }} {% endif %} {% call table_container(
|
||||||
|
"currencies-table-container", aria_label="Configured currencies",
|
||||||
|
heading="Configured Currencies" ) %}
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">Code</th>
|
||||||
|
<th scope="col">Name</th>
|
||||||
|
<th scope="col">Symbol</th>
|
||||||
|
<th scope="col">Status</th>
|
||||||
|
<th scope="col">Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="currencies-table-body"></tbody>
|
||||||
|
{% endcall %} {{ empty_state( "currencies-table-empty", "No currencies
|
||||||
|
configured yet.", hidden=currencies|length > 0 ) }}
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section
|
||||||
|
class="panel"
|
||||||
|
id="currencies-editor"
|
||||||
|
data-default-code="{{ default_currency_code }}"
|
||||||
|
>
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Manage Currencies</h2>
|
||||||
|
<p class="chart-subtitle">
|
||||||
|
Create new currencies or update existing configurations inline.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
{% set status_options = [ {"id": "true", "name": "Active"}, {"id": "false",
|
||||||
|
"name": "Inactive"} ] %}
|
||||||
|
|
||||||
|
<form id="currency-form" class="form-grid" novalidate>
|
||||||
|
{{ select_field( "Currency to update (leave blank for new)",
|
||||||
|
"currency-form-existing", name="existing_code", options=currencies,
|
||||||
|
placeholder="Create a new currency", value_attr="code", label_attr="name" )
|
||||||
|
}}
|
||||||
|
|
||||||
|
<label for="currency-form-code">
|
||||||
|
Currency code
|
||||||
|
<input
|
||||||
|
id="currency-form-code"
|
||||||
|
name="code"
|
||||||
|
type="text"
|
||||||
|
maxlength="3"
|
||||||
|
required
|
||||||
|
autocomplete="off"
|
||||||
|
placeholder="e.g. USD"
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<label for="currency-form-name">
|
||||||
|
Currency name
|
||||||
|
<input
|
||||||
|
id="currency-form-name"
|
||||||
|
name="name"
|
||||||
|
type="text"
|
||||||
|
maxlength="128"
|
||||||
|
required
|
||||||
|
autocomplete="off"
|
||||||
|
placeholder="e.g. US Dollar"
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<label for="currency-form-symbol">
|
||||||
|
Currency symbol (optional)
|
||||||
|
<input
|
||||||
|
id="currency-form-symbol"
|
||||||
|
name="symbol"
|
||||||
|
type="text"
|
||||||
|
maxlength="8"
|
||||||
|
autocomplete="off"
|
||||||
|
placeholder="$"
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
{{ select_field( "Status", "currency-form-status", name="is_active",
|
||||||
|
options=status_options, include_blank=False ) }}
|
||||||
|
|
||||||
|
<div class="button-row">
|
||||||
|
<button type="submit" class="btn primary">Save Currency</button>
|
||||||
|
<button type="button" class="btn" id="currency-form-reset">Reset</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
{{ feedback("currency-form-feedback") }}
|
||||||
|
</section>
|
||||||
|
{% endblock %} {% block scripts %} {{ super() }}
|
||||||
|
<script id="currencies-data" type="application/json">
|
||||||
|
{{ {
|
||||||
|
"currencies": currencies,
|
||||||
|
"currency_stats": currency_stats,
|
||||||
|
"default_currency_code": default_currency_code,
|
||||||
|
"currency_api_base": currency_api_base
|
||||||
|
} | tojson }}
|
||||||
|
</script>
|
||||||
|
<script src="/static/js/currencies.js"></script>
|
||||||
|
{% endblock %}
|
||||||
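The template above expects its route handler to supply currencies, currency_stats, default_currency_code, and currency_api_base in the render context; a hedged sketch of that context shape follows (the values are illustrative, not the project's actual handler output):

# Sketch only: illustrates the context keys templates/currencies.html reads.
example_context = {
    "currencies": [
        {"id": 1, "code": "USD", "name": "US Dollar", "symbol": "$", "is_active": True},
    ],
    "currency_stats": {"total": 1, "active": 1, "inactive": 0},
    "default_currency_code": "USD",
    "currency_api_base": "/api/currencies",
}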
@@ -1,5 +1,8 @@
<footer class="site-footer">
  <div class="container footer-inner">
    <p>© {{ current_year }} CalMiner. All rights reserved.</p>
    <p>
      © {{ current_year }} CalMiner by
      <a href="https://allucanget.biz/">AllYouCanGET</a>. All rights reserved.
    </p>
  </div>
</footer>
@@ -1,16 +1,3 @@
{% set nav_links = [
  ("/", "Dashboard"),
  ("/ui/scenarios", "Scenarios"),
  ("/ui/parameters", "Parameters"),
  ("/ui/costs", "Costs"),
  ("/ui/consumption", "Consumption"),
  ("/ui/production", "Production"),
  ("/ui/equipment", "Equipment"),
  ("/ui/maintenance", "Maintenance"),
  ("/ui/simulations", "Simulations"),
  ("/ui/reporting", "Reporting"),
] %}
<div class="sidebar-inner">
  <div class="sidebar-brand">
    <span class="brand-logo" aria-hidden="true">CM</span>
@@ -19,20 +6,5 @@
    <span class="brand-subtitle">Mining Planner</span>
  </div>
</div>
<nav class="sidebar-nav" aria-label="Primary navigation">
{% include "partials/sidebar_nav.html" %}
{% set current_path = request.url.path if request else "" %}
{% for href, label in nav_links %}
{% if href == "/" %}
{% set is_active = current_path == "/" %}
{% else %}
{% set is_active = current_path.startswith(href) %}
{% endif %}
<a
  href="{{ href }}"
  class="sidebar-link{% if is_active %} is-active{% endif %}"
>
  {{ label }}
</a>
{% endfor %}
</nav>
</div>
88
templates/partials/sidebar_nav.html
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
{% set nav_groups = [
|
||||||
|
{
|
||||||
|
"label": "Dashboard",
|
||||||
|
"links": [
|
||||||
|
{"href": "/", "label": "Dashboard"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Scenarios",
|
||||||
|
"links": [
|
||||||
|
{"href": "/ui/scenarios", "label": "Overview"},
|
||||||
|
{"href": "/ui/parameters", "label": "Parameters"},
|
||||||
|
{"href": "/ui/costs", "label": "Costs"},
|
||||||
|
{"href": "/ui/consumption", "label": "Consumption"},
|
||||||
|
{"href": "/ui/production", "label": "Production"},
|
||||||
|
{
|
||||||
|
"href": "/ui/equipment",
|
||||||
|
"label": "Equipment",
|
||||||
|
"children": [
|
||||||
|
{"href": "/ui/maintenance", "label": "Maintenance"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Analysis",
|
||||||
|
"links": [
|
||||||
|
{"href": "/ui/simulations", "label": "Simulations"},
|
||||||
|
{"href": "/ui/reporting", "label": "Reporting"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Settings",
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"href": "/ui/settings",
|
||||||
|
"label": "Settings",
|
||||||
|
"children": [
|
||||||
|
{"href": "/ui/currencies", "label": "Currency Management"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
] %}
|
||||||
|
|
||||||
|
<nav class="sidebar-nav" aria-label="Primary navigation">
|
||||||
|
{% set current_path = request.url.path if request else "" %}
|
||||||
|
{% for group in nav_groups %}
|
||||||
|
<div class="sidebar-section">
|
||||||
|
<div class="sidebar-section-label">{{ group.label }}</div>
|
||||||
|
<div class="sidebar-section-links">
|
||||||
|
{% for link in group.links %}
|
||||||
|
{% set href = link.href %}
|
||||||
|
{% if href == "/" %}
|
||||||
|
{% set is_active = current_path == "/" %}
|
||||||
|
{% else %}
|
||||||
|
{% set is_active = current_path.startswith(href) %}
|
||||||
|
{% endif %}
|
||||||
|
<div class="sidebar-link-block">
|
||||||
|
<a
|
||||||
|
href="{{ href }}"
|
||||||
|
class="sidebar-link{% if is_active %} is-active{% endif %}"
|
||||||
|
>
|
||||||
|
{{ link.label }}
|
||||||
|
</a>
|
||||||
|
{% if link.children %}
|
||||||
|
<div class="sidebar-sublinks">
|
||||||
|
{% for child in link.children %}
|
||||||
|
{% if child.href == "/" %}
|
||||||
|
{% set child_active = current_path == "/" %}
|
||||||
|
{% else %}
|
||||||
|
{% set child_active = current_path.startswith(child.href) %}
|
||||||
|
{% endif %}
|
||||||
|
<a
|
||||||
|
href="{{ child.href }}"
|
||||||
|
class="sidebar-sublink{% if child_active %} is-active{% endif %}"
|
||||||
|
>
|
||||||
|
{{ child.label }}
|
||||||
|
</a>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</nav>
|
||||||
113
templates/settings.html
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Settings · CalMiner{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<section class="page-header">
|
||||||
|
<div>
|
||||||
|
<h1>Settings</h1>
|
||||||
|
<p class="page-subtitle">Configure platform defaults and administrative options.</p>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
<section class="settings-grid">
|
||||||
|
<article class="settings-card">
|
||||||
|
<h2>Currency Management</h2>
|
||||||
|
<p>Manage available currencies, symbols, and default selections from the Currency Management page.</p>
|
||||||
|
<a class="button-link" href="/ui/currencies">Go to Currency Management</a>
|
||||||
|
</article>
|
||||||
|
<article class="settings-card">
|
||||||
|
<h2>Visual Theme</h2>
|
||||||
|
<p>Adjust CalMiner theme colors and preview changes instantly.</p>
|
||||||
|
<p class="settings-card-note">Changes save to the settings table and apply across the UI after submission. Environment overrides (if configured) remain read-only.</p>
|
||||||
|
</article>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="panel" id="theme-settings" data-api="/api/settings/css">
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Theme Colors</h2>
|
||||||
|
<p class="chart-subtitle">Update global CSS variables to customize CalMiner's appearance.</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
<form id="theme-settings-form" class="form-grid color-form-grid" novalidate>
|
||||||
|
{% for key, value in css_variables.items() %}
|
||||||
|
{% set env_meta = css_env_override_meta.get(key) %}
|
||||||
|
<label class="color-form-field{% if env_meta %} is-env-override{% endif %}" data-variable="{{ key }}">
|
||||||
|
<span class="color-field-header">
|
||||||
|
<span class="color-field-name">{{ key }}</span>
|
||||||
|
<span class="color-field-default">Default: {{ css_defaults[key] }}</span>
|
||||||
|
</span>
|
||||||
|
<span class="color-field-helper" id="color-helper-{{ loop.index }}">Accepts hex, rgb(a), or hsl(a) values.</span>
|
||||||
|
{% if env_meta %}
|
||||||
|
<span class="color-env-flag">Managed via {{ env_meta.env_var }} (read-only)</span>
|
||||||
|
{% endif %}
|
||||||
|
<span class="color-input-row">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
name="{{ key }}"
|
||||||
|
class="color-value-input"
|
||||||
|
value="{{ value }}"
|
||||||
|
autocomplete="off"
|
||||||
|
aria-describedby="color-helper-{{ loop.index }}"
|
||||||
|
{% if env_meta %}disabled aria-disabled="true" data-env-override="true"{% endif %}
|
||||||
|
/>
|
||||||
|
<span class="color-preview" aria-hidden="true" style="background: {{ value }}"></span>
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
<div class="button-row">
|
||||||
|
<button type="submit" class="btn primary">Save Theme</button>
|
||||||
|
<button type="button" class="btn" id="theme-settings-reset">Reset to Defaults</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
{% from "partials/components.html" import feedback with context %}
|
||||||
|
{{ feedback("theme-settings-feedback") }}
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="panel" id="theme-env-overrides">
|
||||||
|
<header class="panel-header">
|
||||||
|
<div>
|
||||||
|
<h2>Environment Overrides</h2>
|
||||||
|
<p class="chart-subtitle">The following CSS variables are controlled via environment variables and take precedence over database values.</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
{% if css_env_override_rows %}
|
||||||
|
<div class="table-container env-overrides-table">
|
||||||
|
<table aria-label="Environment-controlled theme variables">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">CSS Variable</th>
|
||||||
|
<th scope="col">Environment Variable</th>
|
||||||
|
<th scope="col">Value</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for row in css_env_override_rows %}
|
||||||
|
<tr>
|
||||||
|
<td><code>{{ row.css_key }}</code></td>
|
||||||
|
<td><code>{{ row.env_var }}</code></td>
|
||||||
|
<td><code>{{ row.value }}</code></td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<p class="empty-state">No environment overrides configured.</p>
|
||||||
|
{% endif %}
|
||||||
|
</section>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
{{ super() }}
|
||||||
|
<script id="theme-settings-data" type="application/json">
|
||||||
|
{{ {
|
||||||
|
"variables": css_variables,
|
||||||
|
"defaults": css_defaults,
|
||||||
|
"envOverrides": css_env_overrides,
|
||||||
|
"envSources": css_env_override_rows
|
||||||
|
} | tojson }}
|
||||||
|
</script>
|
||||||
|
<script src="/static/js/settings.js"></script>
|
||||||
|
{% endblock %}
|
||||||
@@ -39,7 +39,7 @@ def live_server() -> Generator[str, None, None]:
        if process.poll() is not None:
            raise RuntimeError("uvicorn server exited before becoming ready")
        try:
            response = httpx.get(BASE_URL, timeout=1.0)
            response = httpx.get(BASE_URL, timeout=1.0, trust_env=False)
            if response.status_code < 500:
                break
        except Exception as exc:  # noqa: BLE001
@@ -64,6 +64,40 @@ def live_server() -> Generator[str, None, None]:
        process.wait(timeout=5)


@pytest.fixture(scope="session", autouse=True)
def seed_default_currencies(live_server: str) -> None:
    """Ensure a baseline set of currencies exists for UI flows."""

    seeds = [
        {"code": "EUR", "name": "Euro", "symbol": "EUR", "is_active": True},
        {"code": "CLP", "name": "Chilean Peso", "symbol": "CLP$", "is_active": True},
    ]

    with httpx.Client(base_url=live_server, timeout=5.0, trust_env=False) as client:
        try:
            response = client.get("/api/currencies/?include_inactive=true")
            response.raise_for_status()
            existing_codes = {
                str(item.get("code"))
                for item in response.json()
                if isinstance(item, dict) and item.get("code")
            }
        except httpx.HTTPError as exc:  # noqa: BLE001
            raise RuntimeError("Failed to read existing currencies") from exc

        for payload in seeds:
            if payload["code"] in existing_codes:
                continue
            try:
                create_response = client.post("/api/currencies/", json=payload)
            except httpx.HTTPError as exc:  # noqa: BLE001
                raise RuntimeError("Failed to seed currencies") from exc

            if create_response.status_code == 409:
                continue
            create_response.raise_for_status()


@pytest.fixture(scope="session")
def playwright_instance() -> Generator[Playwright, None, None]:
    """Provide a Playwright instance for the test session."""
130
tests/e2e/test_currencies.py
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import random
|
||||||
|
import string
|
||||||
|
|
||||||
|
from playwright.sync_api import Page, expect
|
||||||
|
|
||||||
|
|
||||||
|
def _unique_currency_code(existing: set[str]) -> str:
|
||||||
|
"""Generate a unique three-letter code not present in *existing*."""
|
||||||
|
alphabet = string.ascii_uppercase
|
||||||
|
for _ in range(100):
|
||||||
|
candidate = "".join(random.choices(alphabet, k=3))
|
||||||
|
if candidate not in existing and candidate != "USD":
|
||||||
|
return candidate
|
||||||
|
raise AssertionError(
|
||||||
|
"Unable to generate a unique currency code for the test run.")
|
||||||
|
|
||||||
|
|
||||||
|
def _metric_value(page: Page, element_id: str) -> int:
|
||||||
|
locator = page.locator(f"#{element_id}")
|
||||||
|
expect(locator).to_be_visible()
|
||||||
|
return int(locator.inner_text().strip())
|
||||||
|
|
||||||
|
|
||||||
|
def _expect_feedback(page: Page, expected_text: str) -> None:
|
||||||
|
page.wait_for_function(
|
||||||
|
"expected => {"
|
||||||
|
" const el = document.getElementById('currency-form-feedback');"
|
||||||
|
" if (!el) return false;"
|
||||||
|
" const text = (el.textContent || '').trim();"
|
||||||
|
" return !el.classList.contains('hidden') && text === expected;"
|
||||||
|
"}",
|
||||||
|
arg=expected_text,
|
||||||
|
)
|
||||||
|
feedback = page.locator("#currency-form-feedback")
|
||||||
|
expect(feedback).to_have_text(expected_text)
|
||||||
|
|
||||||
|
|
||||||
|
def test_currency_workflow_create_update_toggle(page: Page) -> None:
|
||||||
|
"""Exercise create, update, and toggle flows on the currency settings page."""
|
||||||
|
page.goto("/ui/currencies")
|
||||||
|
expect(page).to_have_title("Currencies · CalMiner")
|
||||||
|
expect(page.locator("h2:has-text('Currency Overview')")).to_be_visible()
|
||||||
|
|
||||||
|
code_cells = page.locator("#currencies-table-body tr td:nth-child(1)")
|
||||||
|
existing_codes = {text.strip().upper()
|
||||||
|
for text in code_cells.all_inner_texts()}
|
||||||
|
|
||||||
|
total_before = _metric_value(page, "currency-metric-total")
|
||||||
|
active_before = _metric_value(page, "currency-metric-active")
|
||||||
|
inactive_before = _metric_value(page, "currency-metric-inactive")
|
||||||
|
|
||||||
|
new_code = _unique_currency_code(existing_codes)
|
    new_name = f"Test Currency {new_code}"
    new_symbol = new_code[0]

    page.fill("#currency-form-code", new_code)
    page.fill("#currency-form-name", new_name)
    page.fill("#currency-form-symbol", new_symbol)
    page.select_option("#currency-form-status", "true")

    with page.expect_response("**/api/currencies/") as create_info:
        page.click("button[type='submit']")
    create_response = create_info.value
    assert create_response.status == 201

    _expect_feedback(page, "Currency created successfully.")

    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-total').textContent.trim()) === expected",
        arg=total_before + 1,
    )
    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected",
        arg=active_before + 1,
    )

    row = page.locator("#currencies-table-body tr").filter(has_text=new_code)
    expect(row).to_be_visible()
    expect(row.locator("td").nth(3)).to_have_text("Active")

    # Switch to update mode using the existing currency option.
    page.select_option("#currency-form-existing", new_code)
    updated_name = f"{new_name} Updated"
    updated_symbol = f"{new_symbol}$"
    page.fill("#currency-form-name", updated_name)
    page.fill("#currency-form-symbol", updated_symbol)
    page.select_option("#currency-form-status", "false")

    with page.expect_response(f"**/api/currencies/{new_code}") as update_info:
        page.click("button[type='submit']")
    update_response = update_info.value
    assert update_response.status == 200

    _expect_feedback(page, "Currency updated successfully.")

    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected",
        arg=active_before,
    )
    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-inactive').textContent.trim()) === expected",
        arg=inactive_before + 1,
    )

    expect(row.locator("td").nth(1)).to_have_text(updated_name)
    expect(row.locator("td").nth(2)).to_have_text(updated_symbol)
    expect(row.locator("td").nth(3)).to_contain_text("Inactive")

    toggle_button = row.locator("button[data-action='toggle']")
    expect(toggle_button).to_have_text("Activate")

    with page.expect_response(f"**/api/currencies/{new_code}/activation") as toggle_info:
        toggle_button.click()
    toggle_response = toggle_info.value
    assert toggle_response.status == 200

    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-active').textContent.trim()) === expected",
        arg=active_before + 1,
    )
    page.wait_for_function(
        "expected => Number(document.getElementById('currency-metric-inactive').textContent.trim()) === expected",
        arg=inactive_before,
    )

    _expect_feedback(page, f"Currency {new_code} activated.")

    expect(row.locator("td").nth(3)).to_contain_text("Active")
    expect(row.locator("button[data-action='toggle']")
           ).to_have_text("Deactivate")
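A note on the pattern used throughout this test: Playwright's sync `expect_response` context manager has to be armed before the click that triggers the request, and `wait_for_function` polls a JavaScript predicate, passing `arg` in as the predicate's argument. A minimal standalone sketch of the same idea, outside CalMiner (the `/api/items` endpoint and `#item-count` element are placeholders, not real application routes):

from playwright.sync_api import sync_playwright


def count_after_submit(url: str) -> None:
    # Placeholder flow: submit a form, wait for the matching API response,
    # then wait until a metric element reflects the expected total.
    with sync_playwright() as pw:
        browser = pw.chromium.launch()
        page = browser.new_page()
        page.goto(url)

        with page.expect_response("**/api/items") as info:  # arm the waiter first
            page.click("button[type='submit']")             # then trigger the request
        assert info.value.status == 201

        # The JS predicate receives `arg` as its single argument.
        page.wait_for_function(
            "expected => Number(document.querySelector('#item-count').textContent) === expected",
            arg=3,
        )
        browser.close()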
@@ -7,6 +7,7 @@ UI_ROUTES = [
     ("/ui/dashboard", "Dashboard · CalMiner", "Operations Overview"),
     ("/ui/scenarios", "Scenario Management · CalMiner", "Create a New Scenario"),
     ("/ui/parameters", "Process Parameters · CalMiner", "Scenario Parameters"),
+    ("/ui/settings", "Settings · CalMiner", "Settings"),
     ("/ui/costs", "Costs · CalMiner", "Cost Overview"),
     ("/ui/consumption", "Consumption · CalMiner", "Consumption Tracking"),
     ("/ui/production", "Production · CalMiner", "Production Output"),
@@ -14,6 +15,7 @@ UI_ROUTES = [
     ("/ui/maintenance", "Maintenance · CalMiner", "Maintenance Schedule"),
     ("/ui/simulations", "Simulations · CalMiner", "Monte Carlo Simulations"),
     ("/ui/reporting", "Reporting · CalMiner", "Scenario KPI Summary"),
+    ("/ui/currencies", "Currencies · CalMiner", "Currency Overview"),
 ]
 
 
@@ -26,3 +28,45 @@ def test_ui_pages_load_correctly(page: Page, url: str, title: str, heading: str)
     heading_locator = page.locator(
         f"h1:has-text('{heading}'), h2:has-text('{heading}')")
     expect(heading_locator.first).to_be_visible()
+
+
+def test_settings_theme_form_interaction(page: Page):
+    page.goto("/ui/settings")
+    expect(page).to_have_title("Settings · CalMiner")
+
+    env_rows = page.locator("#theme-env-overrides tbody tr")
+    disabled_inputs = page.locator(
+        "#theme-settings-form input.color-value-input[disabled]")
+    env_row_count = env_rows.count()
+    disabled_count = disabled_inputs.count()
+    assert disabled_count == env_row_count
+
+    color_input = page.locator(
+        "#theme-settings-form input[name='--color-primary']")
+    expect(color_input).to_be_visible()
+    expect(color_input).to_be_enabled()
+
+    original_value = color_input.input_value()
+    candidate_values = ("#114455", "#225566")
+    new_value = candidate_values[0] if original_value != candidate_values[0] else candidate_values[1]
+
+    color_input.fill(new_value)
+    page.click("#theme-settings-form button[type='submit']")
+
+    feedback = page.locator("#theme-settings-feedback")
+    expect(feedback).to_contain_text("updated successfully")
+
+    computed_color = page.evaluate(
+        "() => getComputedStyle(document.documentElement).getPropertyValue('--color-primary').trim()"
+    )
+    assert computed_color.lower() == new_value.lower()
+
+    page.reload()
+    expect(color_input).to_have_value(new_value)
+
+    color_input.fill(original_value)
+    page.click("#theme-settings-form button[type='submit']")
+    expect(feedback).to_contain_text("updated successfully")
+
+    page.reload()
+    expect(color_input).to_have_value(original_value)
@@ -34,6 +34,7 @@ TestingSessionLocal = sessionmaker(
 def setup_database() -> Generator[None, None, None]:
     # Ensure all model metadata is registered before creating tables
     from models import (
+        application_setting,
         capex,
         consumption,
         distribution,
@@ -52,6 +53,7 @@ def setup_database() -> Generator[None, None, None]:
         distribution,
         equipment,
         maintenance,
+        application_setting,
         opex,
         parameters,
         production_output,
@@ -66,10 +68,13 @@ def setup_database() -> Generator[None, None, None]:
 
 @pytest.fixture()
 def db_session() -> Generator[Session, None, None]:
+    Base.metadata.drop_all(bind=engine)
+    Base.metadata.create_all(bind=engine)
     session = TestingSessionLocal()
     try:
         yield session
     finally:
+        session.rollback()
         session.close()
 
 
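Read together, the last hunk makes the `db_session` fixture rebuild the schema before every test and roll back any open transaction before closing the session. Assembled, the fixture presumably reads roughly as below; `Base`, `engine`, and `TestingSessionLocal` here are stand-ins for the objects the existing fixture module already defines, wired to an in-memory SQLite engine so the sketch is self-contained:

from typing import Generator

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session, declarative_base, sessionmaker

# Stand-ins for the objects the real fixture module provides.
Base = declarative_base()
engine = create_engine("sqlite:///:memory:")
TestingSessionLocal = sessionmaker(bind=engine)


@pytest.fixture()
def db_session() -> Generator[Session, None, None]:
    # Rebuild the schema so earlier tests cannot leak rows or DDL changes.
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(bind=engine)
    session = TestingSessionLocal()
    try:
        yield session
    finally:
        # Roll back uncommitted work before returning the connection.
        session.rollback()
        session.close()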
tests/unit/test_currencies.py (Normal file, 101 lines added)
@@ -0,0 +1,101 @@
from typing import Dict

import pytest

from models.currency import Currency


@pytest.fixture(autouse=True)
def _cleanup_currencies(db_session):
    db_session.query(Currency).delete()
    db_session.commit()
    yield
    db_session.query(Currency).delete()
    db_session.commit()


def _assert_currency(payload: Dict[str, object], code: str, name: str, symbol: str | None, is_active: bool) -> None:
    assert payload["code"] == code
    assert payload["name"] == name
    assert payload["is_active"] is is_active
    if symbol is None:
        assert payload["symbol"] is None
    else:
        assert payload["symbol"] == symbol


def test_list_returns_default_currency(api_client, db_session):
    response = api_client.get("/api/currencies/")
    assert response.status_code == 200
    data = response.json()
    assert any(item["code"] == "USD" for item in data)


def test_create_currency_success(api_client, db_session):
    payload = {"code": "EUR", "name": "Euro", "symbol": "€", "is_active": True}
    response = api_client.post("/api/currencies/", json=payload)
    assert response.status_code == 201
    data = response.json()
    _assert_currency(data, "EUR", "Euro", "€", True)

    stored = db_session.query(Currency).filter_by(code="EUR").one()
    assert stored.name == "Euro"
    assert stored.symbol == "€"
    assert stored.is_active is True


def test_create_currency_conflict(api_client, db_session):
    api_client.post(
        "/api/currencies/",
        json={"code": "CAD", "name": "Canadian Dollar",
              "symbol": "$", "is_active": True},
    )
    duplicate = api_client.post(
        "/api/currencies/",
        json={"code": "CAD", "name": "Canadian Dollar",
              "symbol": "$", "is_active": True},
    )
    assert duplicate.status_code == 409


def test_update_currency_fields(api_client, db_session):
    api_client.post(
        "/api/currencies/",
        json={"code": "GBP", "name": "British Pound",
              "symbol": "£", "is_active": True},
    )

    response = api_client.put(
        "/api/currencies/GBP",
        json={"name": "Pound Sterling", "symbol": "£", "is_active": False},
    )
    assert response.status_code == 200
    data = response.json()
    _assert_currency(data, "GBP", "Pound Sterling", "£", False)


def test_toggle_currency_activation(api_client, db_session):
    api_client.post(
        "/api/currencies/",
        json={"code": "AUD", "name": "Australian Dollar",
              "symbol": "A$", "is_active": True},
    )

    response = api_client.patch(
        "/api/currencies/AUD/activation",
        json={"is_active": False},
    )
    assert response.status_code == 200
    data = response.json()
    _assert_currency(data, "AUD", "Australian Dollar", "A$", False)


def test_default_currency_cannot_be_deactivated(api_client, db_session):
    api_client.get("/api/currencies/")
    response = api_client.patch(
        "/api/currencies/USD/activation",
        json={"is_active": False},
    )
    assert response.status_code == 400
    assert response.json()[
        "detail"] == "The default currency cannot be deactivated."
@@ -1,54 +1,74 @@
-from tests.unit.conftest import client
+from uuid import uuid4
+
+import pytest
+
+from models.currency import Currency
 
 
-def test_create_capex_with_currency_code_and_list():
-    # create scenario first (reuse helper from other tests)
-    from tests.unit.test_costs import _create_scenario
-
-    sid = _create_scenario()
-
-    # create with currency_code
+@pytest.fixture
+def seeded_currency(db_session):
+    currency = Currency(code="GBP", name="British Pound", symbol="GBP")
+    db_session.add(currency)
+    db_session.commit()
+    db_session.refresh(currency)
+
+    try:
+        yield currency
+    finally:
+        db_session.delete(currency)
+        db_session.commit()
+
+
+def _create_scenario(api_client):
+    payload = {
+        "name": f"CurrencyScenario-{uuid4()}",
+        "description": "Currency workflow scenario",
+    }
+    resp = api_client.post("/api/scenarios/", json=payload)
+    assert resp.status_code == 200
+    return resp.json()["id"]
+
+
+def test_create_capex_with_currency_code_and_list(api_client, seeded_currency):
+    sid = _create_scenario(api_client)
+
     payload = {
         "scenario_id": sid,
         "amount": 500.0,
         "description": "Capex with GBP",
-        "currency_code": "GBP",
+        "currency_code": seeded_currency.code,
     }
-    resp = client.post("/api/costs/capex", json=payload)
+    resp = api_client.post("/api/costs/capex", json=payload)
     assert resp.status_code == 200
     data = resp.json()
-    assert data["currency_code"] == "GBP" or data.get(
-        "currency", {}).get("code") == "GBP"
+    assert data.get("currency_code") == seeded_currency.code or data.get(
+        "currency", {}
+    ).get("code") == seeded_currency.code
 
 
-def test_create_opex_with_currency_id():
-    from tests.unit.test_costs import _create_scenario
-    from routes.currencies import list_currencies
-
-    sid = _create_scenario()
-
-    # fetch currencies to get an id
-    resp = client.get("/api/currencies/")
+def test_create_opex_with_currency_id(api_client, seeded_currency):
+    sid = _create_scenario(api_client)
+
+    resp = api_client.get("/api/currencies/")
     assert resp.status_code == 200
     currencies = resp.json()
-    assert len(currencies) > 0
-    cid = currencies[0]["id"]
+    assert any(c["id"] == seeded_currency.id for c in currencies)
 
     payload = {
         "scenario_id": sid,
         "amount": 120.0,
         "description": "Opex with explicit id",
-        "currency_id": cid,
+        "currency_id": seeded_currency.id,
     }
-    resp = client.post("/api/costs/opex", json=payload)
+    resp = api_client.post("/api/costs/opex", json=payload)
     assert resp.status_code == 200
     data = resp.json()
-    assert data["currency_id"] == cid
+    assert data["currency_id"] == seeded_currency.id
 
 
-def test_list_currencies_endpoint():
-    resp = client.get("/api/currencies/")
+def test_list_currencies_endpoint(api_client, seeded_currency):
+    resp = api_client.get("/api/currencies/")
     assert resp.status_code == 200
     data = resp.json()
     assert isinstance(data, list)
-    assert all("id" in c and "code" in c for c in data)
+    assert any(c["id"] == seeded_currency.id for c in data)
tests/unit/test_settings_routes.py (Normal file, 53 lines added)
@@ -0,0 +1,53 @@
import pytest
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session

from services import settings as settings_service


@pytest.mark.usefixtures("db_session")
def test_read_css_settings_reflects_env_overrides(
    api_client: TestClient, monkeypatch: pytest.MonkeyPatch
) -> None:
    env_var = settings_service.css_key_to_env_var("--color-background")
    monkeypatch.setenv(env_var, "#123456")

    response = api_client.get("/api/settings/css")
    assert response.status_code == 200
    body = response.json()

    assert body["variables"]["--color-background"] == "#123456"
    assert body["env_overrides"]["--color-background"] == "#123456"
    assert any(
        source["env_var"] == env_var and source["value"] == "#123456"
        for source in body["env_sources"]
    )


@pytest.mark.usefixtures("db_session")
def test_update_css_settings_persists_changes(
    api_client: TestClient, db_session: Session
) -> None:
    payload = {"variables": {"--color-primary": "#112233"}}

    response = api_client.put("/api/settings/css", json=payload)
    assert response.status_code == 200
    body = response.json()

    assert body["variables"]["--color-primary"] == "#112233"

    persisted = settings_service.get_css_color_settings(db_session)
    assert persisted["--color-primary"] == "#112233"


@pytest.mark.usefixtures("db_session")
def test_update_css_settings_invalid_value_returns_422(
    api_client: TestClient
) -> None:
    response = api_client.put(
        "/api/settings/css",
        json={"variables": {"--color-primary": "not-a-color"}},
    )
    assert response.status_code == 422
    body = response.json()
    assert "color" in body["detail"].lower()
tests/unit/test_settings_service.py (Normal file, 137 lines added)
@@ -0,0 +1,137 @@
from types import SimpleNamespace
from typing import Dict

import pytest

from sqlalchemy.orm import Session

from models.application_setting import ApplicationSetting
from services import settings as settings_service
from services.settings import CSS_COLOR_DEFAULTS


@pytest.fixture(name="clean_env")
def fixture_clean_env(monkeypatch: pytest.MonkeyPatch) -> Dict[str, str]:
    """Provide an isolated environment mapping for tests."""

    env: Dict[str, str] = {}
    monkeypatch.setattr(settings_service, "os", SimpleNamespace(environ=env))
    return env


def test_css_key_to_env_var_formatting():
    assert settings_service.css_key_to_env_var("--color-background") == "CALMINER_THEME_COLOR_BACKGROUND"
    assert settings_service.css_key_to_env_var("--color-primary-stronger") == "CALMINER_THEME_COLOR_PRIMARY_STRONGER"


@pytest.mark.parametrize(
    "env_key,env_value",
    [
        ("--color-background", "#ffffff"),
        ("--color-primary", "rgb(10, 20, 30)"),
        ("--color-accent", "rgba(1,2,3,0.5)"),
        ("--color-text-secondary", "hsla(210, 40%, 40%, 1)"),
    ],
)
def test_read_css_color_env_overrides_valid_values(clean_env, env_key, env_value):
    env_var = settings_service.css_key_to_env_var(env_key)
    clean_env[env_var] = env_value

    overrides = settings_service.read_css_color_env_overrides(clean_env)
    assert overrides[env_key] == env_value


@pytest.mark.parametrize(
    "invalid_value",
    [
        "",  # empty
        "not-a-color",  # arbitrary string
        "#12",  # short hex
        "rgb(1,2)",  # malformed rgb
    ],
)
def test_read_css_color_env_overrides_invalid_values_raise(clean_env, invalid_value):
    env_var = settings_service.css_key_to_env_var("--color-background")
    clean_env[env_var] = invalid_value

    with pytest.raises(ValueError):
        settings_service.read_css_color_env_overrides(clean_env)


def test_read_css_color_env_overrides_ignores_missing(clean_env):
    overrides = settings_service.read_css_color_env_overrides(clean_env)
    assert overrides == {}


def test_list_css_env_override_rows_returns_structured_data(clean_env):
    clean_env[settings_service.css_key_to_env_var("--color-primary")] = "#123456"
    rows = settings_service.list_css_env_override_rows(clean_env)
    assert rows == [
        {
            "css_key": "--color-primary",
            "env_var": settings_service.css_key_to_env_var("--color-primary"),
            "value": "#123456",
        }
    ]


def test_normalize_color_value_strips_and_validates():
    assert settings_service._normalize_color_value(" #abcdef ") == "#abcdef"
    with pytest.raises(ValueError):
        settings_service._normalize_color_value(123)  # type: ignore[arg-type]
    with pytest.raises(ValueError):
        settings_service._normalize_color_value(" ")
    with pytest.raises(ValueError):
        settings_service._normalize_color_value("#12")


def test_ensure_css_color_settings_creates_defaults(db_session: Session):
    settings_service.ensure_css_color_settings(db_session)

    stored = {
        record.key: record.value
        for record in db_session.query(ApplicationSetting).all()
    }
    assert set(stored.keys()) == set(CSS_COLOR_DEFAULTS.keys())
    assert stored == CSS_COLOR_DEFAULTS


def test_update_css_color_settings_persists_changes(db_session: Session):
    settings_service.ensure_css_color_settings(db_session)

    updated = settings_service.update_css_color_settings(
        db_session,
        {"--color-background": "#000000", "--color-accent": "#abcdef"},
    )

    assert updated["--color-background"] == "#000000"
    assert updated["--color-accent"] == "#abcdef"

    stored = {
        record.key: record.value
        for record in db_session.query(ApplicationSetting).all()
    }
    assert stored["--color-background"] == "#000000"
    assert stored["--color-accent"] == "#abcdef"


def test_get_css_color_settings_respects_env_overrides(
    db_session: Session, clean_env: Dict[str, str]
):
    settings_service.ensure_css_color_settings(db_session)
    override_value = "#112233"
    clean_env[settings_service.css_key_to_env_var("--color-background")] = (
        override_value
    )

    values = settings_service.get_css_color_settings(db_session)

    assert values["--color-background"] == override_value

    db_value = (
        db_session.query(ApplicationSetting)
        .filter_by(key="--color-background")
        .one()
        .value
    )
    assert db_value != override_value
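The `clean_env` fixture above relies on a small trick: instead of patching individual variables, it swaps the `os` object that `services.settings` imported for a `SimpleNamespace` whose `environ` is a plain dict, so the test owns everything the service can observe. A generic, self-contained sketch of the same technique against a throwaway stand-in module (names such as `fake_settings` and `MYAPP_THEME_COLOR` are illustrative, not CalMiner code):

import os
from types import ModuleType, SimpleNamespace

import pytest

# Throwaway module standing in for a service that did `import os` at module
# scope and reads os.environ when asked.
fake_settings = ModuleType("fake_settings")
fake_settings.os = os


def _read_color() -> str | None:
    return fake_settings.os.environ.get("MYAPP_THEME_COLOR")


fake_settings.read_color = _read_color


@pytest.fixture()
def clean_env(monkeypatch: pytest.MonkeyPatch) -> dict:
    env: dict[str, str] = {}
    # Replace the module-level `os` with a stand-in whose environ is a plain
    # dict, so only values the test sets are visible to the module.
    monkeypatch.setattr(fake_settings, "os", SimpleNamespace(environ=env))
    return env


def test_reads_override(clean_env):
    clean_env["MYAPP_THEME_COLOR"] = "#123456"
    assert fake_settings.read_color() == "#123456"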
tests/unit/test_setup_database.py (Normal file, 459 lines added)
@@ -0,0 +1,459 @@
import argparse
from unittest import mock

import psycopg2
import pytest
from psycopg2 import errors as psycopg_errors

import scripts.setup_database as setup_db_module

from scripts import seed_data
from scripts.setup_database import DatabaseConfig, DatabaseSetup


@pytest.fixture()
def mock_config() -> DatabaseConfig:
    return DatabaseConfig(
        driver="postgresql",
        host="localhost",
        port=5432,
        database="calminer_test",
        user="calminer",
        password="secret",
        schema="public",
        admin_user="postgres",
        admin_password="secret",
    )


@pytest.fixture()
def setup_instance(mock_config: DatabaseConfig) -> DatabaseSetup:
    return DatabaseSetup(mock_config, dry_run=True)


def test_seed_baseline_data_dry_run_skips_verification(setup_instance: DatabaseSetup) -> None:
    with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object(
        setup_instance, "_verify_seeded_data"
    ) as verify_mock:
        setup_instance.seed_baseline_data(dry_run=True)

    seed_run.assert_called_once()
    namespace_arg = seed_run.call_args[0][0]
    assert isinstance(namespace_arg, argparse.Namespace)
    assert namespace_arg.dry_run is True
    assert namespace_arg.currencies is True
    assert namespace_arg.units is True
    assert seed_run.call_args.kwargs["config"] is setup_instance.config
    verify_mock.assert_not_called()


def test_seed_baseline_data_invokes_verification(setup_instance: DatabaseSetup) -> None:
    expected_currencies = {code for code, *_ in seed_data.CURRENCY_SEEDS}
    expected_units = {code for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS}

    with mock.patch("scripts.seed_data.run_with_namespace") as seed_run, mock.patch.object(
        setup_instance, "_verify_seeded_data"
    ) as verify_mock:
        setup_instance.seed_baseline_data(dry_run=False)

    seed_run.assert_called_once()
    namespace_arg = seed_run.call_args[0][0]
    assert isinstance(namespace_arg, argparse.Namespace)
    assert namespace_arg.dry_run is False
    assert seed_run.call_args.kwargs["config"] is setup_instance.config
    verify_mock.assert_called_once_with(
        expected_currency_codes=expected_currencies,
        expected_unit_codes=expected_units,
    )


def test_run_migrations_applies_baseline_when_missing(mock_config: DatabaseConfig, tmp_path) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    baseline = tmp_path / "000_base.sql"
    baseline.write_text("SELECT 1;", encoding="utf-8")
    other_migration = tmp_path / "20251022_add_other.sql"
    other_migration.write_text("SELECT 2;", encoding="utf-8")

    migration_calls: list[str] = []

    def capture_migration(cursor, schema_name: str, path):
        migration_calls.append(path.name)
        return path.name

    connection_mock = mock.MagicMock()
    connection_mock.__enter__.return_value = connection_mock
    cursor_context = mock.MagicMock()
    cursor_mock = mock.MagicMock()
    cursor_context.__enter__.return_value = cursor_mock
    connection_mock.cursor.return_value = cursor_context

    with mock.patch.object(
        setup_instance, "_application_connection", return_value=connection_mock
    ), mock.patch.object(
        setup_instance, "_migrations_table_exists", return_value=True
    ), mock.patch.object(
        setup_instance, "_fetch_applied_migrations", return_value=set()
    ), mock.patch.object(
        setup_instance, "_apply_migration_file", side_effect=capture_migration
    ) as apply_mock:
        setup_instance.run_migrations(tmp_path)

    assert apply_mock.call_count == 1
    assert migration_calls == ["000_base.sql"]
    legacy_marked = any(
        call.args[1] == ("20251022_add_other.sql",)
        for call in cursor_mock.execute.call_args_list
        if len(call.args) == 2
    )
    assert legacy_marked


def test_run_migrations_noop_when_all_files_already_applied(
    mock_config: DatabaseConfig, tmp_path
) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    baseline = tmp_path / "000_base.sql"
    baseline.write_text("SELECT 1;", encoding="utf-8")
    other_migration = tmp_path / "20251022_add_other.sql"
    other_migration.write_text("SELECT 2;", encoding="utf-8")

    connection_mock, cursor_mock = _connection_with_cursor()

    with mock.patch.object(
        setup_instance, "_application_connection", return_value=connection_mock
    ), mock.patch.object(
        setup_instance, "_migrations_table_exists", return_value=True
    ), mock.patch.object(
        setup_instance,
        "_fetch_applied_migrations",
        return_value={"000_base.sql", "20251022_add_other.sql"},
    ), mock.patch.object(
        setup_instance, "_apply_migration_file"
    ) as apply_mock:
        setup_instance.run_migrations(tmp_path)

    apply_mock.assert_not_called()
    cursor_mock.execute.assert_not_called()


def _connection_with_cursor() -> tuple[mock.MagicMock, mock.MagicMock]:
    connection_mock = mock.MagicMock()
    connection_mock.__enter__.return_value = connection_mock
    cursor_context = mock.MagicMock()
    cursor_mock = mock.MagicMock()
    cursor_context.__enter__.return_value = cursor_mock
    connection_mock.cursor.return_value = cursor_context
    return connection_mock, cursor_mock


def test_verify_seeded_data_raises_when_currency_missing(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock, cursor_mock = _connection_with_cursor()
    cursor_mock.fetchall.return_value = [("USD", True)]

    with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
        with pytest.raises(RuntimeError) as exc:
            setup_instance._verify_seeded_data(
                expected_currency_codes={"USD", "EUR"},
                expected_unit_codes=set(),
            )

    assert "EUR" in str(exc.value)


def test_verify_seeded_data_raises_when_default_currency_inactive(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock, cursor_mock = _connection_with_cursor()
    cursor_mock.fetchall.return_value = [("USD", False)]

    with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
        with pytest.raises(RuntimeError) as exc:
            setup_instance._verify_seeded_data(
                expected_currency_codes={"USD"},
                expected_unit_codes=set(),
            )

    assert "inactive" in str(exc.value)


def test_verify_seeded_data_raises_when_units_missing(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock, cursor_mock = _connection_with_cursor()
    cursor_mock.fetchall.return_value = [("tonnes", True)]

    with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
        with pytest.raises(RuntimeError) as exc:
            setup_instance._verify_seeded_data(
                expected_currency_codes=set(),
                expected_unit_codes={"tonnes", "liters"},
            )

    assert "liters" in str(exc.value)


def test_verify_seeded_data_raises_when_measurement_table_missing(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock, cursor_mock = _connection_with_cursor()
    cursor_mock.execute.side_effect = psycopg_errors.UndefinedTable("relation does not exist")

    with mock.patch.object(setup_instance, "_application_connection", return_value=connection_mock):
        with pytest.raises(RuntimeError) as exc:
            setup_instance._verify_seeded_data(
                expected_currency_codes=set(),
                expected_unit_codes={"tonnes"},
            )

    assert "measurement_unit" in str(exc.value)
    connection_mock.rollback.assert_called_once()


def test_seed_baseline_data_rerun_uses_existing_records(
    mock_config: DatabaseConfig,
) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    connection_mock, cursor_mock = _connection_with_cursor()

    currency_rows = [(code, True) for code, *_ in seed_data.CURRENCY_SEEDS]
    unit_rows = [(code, True) for code, *_ in seed_data.MEASUREMENT_UNIT_SEEDS]

    cursor_mock.fetchall.side_effect = [
        currency_rows,
        unit_rows,
        currency_rows,
        unit_rows,
    ]

    with mock.patch.object(
        setup_instance, "_application_connection", return_value=connection_mock
    ), mock.patch("scripts.seed_data.run_with_namespace") as seed_run:
        setup_instance.seed_baseline_data(dry_run=False)
        setup_instance.seed_baseline_data(dry_run=False)

    assert seed_run.call_count == 2
    first_namespace = seed_run.call_args_list[0].args[0]
    assert isinstance(first_namespace, argparse.Namespace)
    assert first_namespace.dry_run is False
    assert seed_run.call_args_list[0].kwargs["config"] is setup_instance.config
    assert cursor_mock.execute.call_count == 4


def test_ensure_database_raises_with_context(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock = mock.MagicMock()
    cursor_mock = mock.MagicMock()
    cursor_mock.fetchone.return_value = None
    cursor_mock.execute.side_effect = [None, psycopg2.Error("create_fail")]
    connection_mock.cursor.return_value = cursor_mock

    with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock):
        with pytest.raises(RuntimeError) as exc:
            setup_instance.ensure_database()

    assert "Failed to create database" in str(exc.value)


def test_ensure_role_raises_with_context_during_creation(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    admin_conn, admin_cursor = _connection_with_cursor()
    admin_cursor.fetchone.return_value = None
    admin_cursor.execute.side_effect = [None, psycopg2.Error("role_fail")]

    with mock.patch.object(
        setup_instance,
        "_admin_connection",
        side_effect=[admin_conn],
    ):
        with pytest.raises(RuntimeError) as exc:
            setup_instance.ensure_role()

    assert "Failed to create role" in str(exc.value)


def test_ensure_role_raises_with_context_during_privilege_grants(
    mock_config: DatabaseConfig,
) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    admin_conn, admin_cursor = _connection_with_cursor()
    admin_cursor.fetchone.return_value = (1,)

    privilege_conn, privilege_cursor = _connection_with_cursor()
    privilege_cursor.execute.side_effect = [psycopg2.Error("grant_fail")]

    with mock.patch.object(
        setup_instance,
        "_admin_connection",
        side_effect=[admin_conn, privilege_conn],
    ):
        with pytest.raises(RuntimeError) as exc:
            setup_instance.ensure_role()

    assert "Failed to grant privileges" in str(exc.value)


def test_ensure_database_dry_run_skips_creation(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=True)

    connection_mock = mock.MagicMock()
    cursor_mock = mock.MagicMock()
    cursor_mock.fetchone.return_value = None
    connection_mock.cursor.return_value = cursor_mock

    with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch(
        "scripts.setup_database.logger"
    ) as logger_mock:
        setup_instance.ensure_database()

    # expect only existence check, no create attempt
    cursor_mock.execute.assert_called_once()
    logger_mock.info.assert_any_call(
        "Dry run: would create database '%s'. Run without --dry-run to proceed.", mock_config.database
    )


def test_ensure_role_dry_run_skips_creation_and_grants(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=True)

    admin_conn, admin_cursor = _connection_with_cursor()
    admin_cursor.fetchone.return_value = None

    with mock.patch.object(
        setup_instance,
        "_admin_connection",
        side_effect=[admin_conn],
    ) as conn_mock, mock.patch("scripts.setup_database.logger") as logger_mock:
        setup_instance.ensure_role()

    assert conn_mock.call_count == 1
    admin_cursor.execute.assert_called_once()
    logger_mock.info.assert_any_call(
        "Dry run: would create role '%s'. Run without --dry-run to apply.", mock_config.user
    )


def test_register_rollback_skipped_when_dry_run(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=True)
    setup_instance._register_rollback("noop", lambda: None)
    assert setup_instance._rollback_actions == []


def test_execute_rollbacks_runs_in_reverse_order(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    calls: list[str] = []

    def first_action() -> None:
        calls.append("first")

    def second_action() -> None:
        calls.append("second")

    setup_instance._register_rollback("first", first_action)
    setup_instance._register_rollback("second", second_action)

    with mock.patch("scripts.setup_database.logger"):
        setup_instance.execute_rollbacks()

    assert calls == ["second", "first"]
    assert setup_instance._rollback_actions == []


def test_ensure_database_registers_rollback_action(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)
    connection_mock = mock.MagicMock()
    cursor_mock = mock.MagicMock()
    cursor_mock.fetchone.return_value = None
    connection_mock.cursor.return_value = cursor_mock

    with mock.patch.object(setup_instance, "_admin_connection", return_value=connection_mock), mock.patch.object(
        setup_instance, "_register_rollback"
    ) as register_mock, mock.patch.object(setup_instance, "_drop_database") as drop_mock:
        setup_instance.ensure_database()
        register_mock.assert_called_once()
        label, action = register_mock.call_args[0]
        assert "drop database" in label
        action()
        drop_mock.assert_called_once_with(mock_config.database)


def test_ensure_role_registers_rollback_actions(mock_config: DatabaseConfig) -> None:
    setup_instance = DatabaseSetup(mock_config, dry_run=False)

    admin_conn, admin_cursor = _connection_with_cursor()
    admin_cursor.fetchone.return_value = None
    privilege_conn, privilege_cursor = _connection_with_cursor()

    with mock.patch.object(
        setup_instance,
        "_admin_connection",
        side_effect=[admin_conn, privilege_conn],
    ), mock.patch.object(
        setup_instance, "_register_rollback"
    ) as register_mock, mock.patch.object(
        setup_instance, "_drop_role"
    ) as drop_mock, mock.patch.object(
        setup_instance, "_revoke_role_privileges"
    ) as revoke_mock:
        setup_instance.ensure_role()
        assert register_mock.call_count == 2
        drop_label, drop_action = register_mock.call_args_list[0][0]
        revoke_label, revoke_action = register_mock.call_args_list[1][0]

        assert "drop role" in drop_label
        assert "revoke privileges" in revoke_label

        drop_action()
        drop_mock.assert_called_once_with(mock_config.user)

        revoke_action()
        revoke_mock.assert_called_once()


def test_main_triggers_rollbacks_on_failure(mock_config: DatabaseConfig) -> None:
    args = argparse.Namespace(
        ensure_database=True,
        ensure_role=True,
        ensure_schema=False,
        initialize_schema=False,
        run_migrations=False,
        seed_data=False,
        migrations_dir=None,
        db_driver=None,
        db_host=None,
        db_port=None,
        db_name=None,
        db_user=None,
        db_password=None,
        db_schema=None,
        admin_url=None,
        admin_user=None,
        admin_password=None,
        admin_db=None,
        dry_run=False,
        verbose=0,
    )

    with mock.patch.object(setup_db_module, "parse_args", return_value=args), mock.patch.object(
        setup_db_module.DatabaseConfig, "from_env", return_value=mock_config
    ), mock.patch.object(
        setup_db_module, "DatabaseSetup"
    ) as setup_cls:
        setup_instance = mock.MagicMock()
        setup_instance.dry_run = False
        setup_instance._rollback_actions = [
            ("drop role", mock.MagicMock()),
        ]
        setup_instance.ensure_database.side_effect = RuntimeError("boom")
        setup_instance.execute_rollbacks = mock.MagicMock()
        setup_instance.clear_rollbacks = mock.MagicMock()
        setup_cls.return_value = setup_instance

        with pytest.raises(RuntimeError):
            setup_db_module.main()

        setup_instance.execute_rollbacks.assert_called_once()
        setup_instance.clear_rollbacks.assert_called_once()
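The `_connection_with_cursor` helper above exists because a bare `MagicMock` used as a context manager yields a fresh child mock from `__enter__`, not the mock the test configured, so assertions on `conn` or `cursor` would miss the objects the code under test actually touched. Wiring `__enter__` back to the intended objects makes psycopg2-style `with conn: ... with conn.cursor() as cur:` code observable. A minimal, self-contained illustration (the `fetch_one` function is a hypothetical stand-in for production code, not part of CalMiner):

from unittest import mock


def fetch_one(connect):
    # Hypothetical production shape: psycopg2-style nested context managers.
    with connect() as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            return cur.fetchone()


def test_fetch_one_uses_configured_cursor():
    conn = mock.MagicMock()
    conn.__enter__.return_value = conn          # `with conn:` yields the mock itself
    cursor_ctx = mock.MagicMock()
    cursor = mock.MagicMock()
    cursor_ctx.__enter__.return_value = cursor  # `with conn.cursor():` yields this mock
    conn.cursor.return_value = cursor_ctx
    cursor.fetchone.return_value = (42,)

    assert fetch_one(lambda: conn) == (42,)
    cursor.execute.assert_called_once_with("SELECT 1")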
@@ -4,6 +4,7 @@ import pytest
 from fastapi.testclient import TestClient
 
 from models.scenario import Scenario
+from services import settings as settings_service
 
 
 def test_dashboard_route_provides_summary(
@@ -129,3 +130,36 @@ def test_additional_ui_routes_render_templates(
 
     context = cast(Dict[str, Any], getattr(response, "context", {}))
     assert context
+
+
+def test_settings_route_provides_css_context(
+    api_client: TestClient,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    env_var = settings_service.css_key_to_env_var("--color-accent")
+    monkeypatch.setenv(env_var, "#abcdef")
+
+    response = api_client.get("/ui/settings")
+    assert response.status_code == 200
+
+    template = getattr(response, "template", None)
+    assert template is not None
+    assert template.name == "settings.html"
+
+    context = cast(Dict[str, Any], getattr(response, "context", {}))
+    assert "css_variables" in context
+    assert "css_defaults" in context
+    assert "css_env_overrides" in context
+    assert "css_env_override_rows" in context
+    assert "css_env_override_meta" in context
+
+    assert context["css_variables"]["--color-accent"] == "#abcdef"
+    assert context["css_defaults"]["--color-accent"] == settings_service.CSS_COLOR_DEFAULTS["--color-accent"]
+    assert context["css_env_overrides"]["--color-accent"] == "#abcdef"
+
+    override_rows = context["css_env_override_rows"]
+    assert any(row["env_var"] == env_var for row in override_rows)
+
+    meta = context["css_env_override_meta"]["--color-accent"]
+    assert meta["value"] == "#abcdef"
+    assert meta["env_var"] == env_var