feat: implement persistent audit logging for import/export operations with Prometheus metrics
tests/test_import_export_integration.py (new file, 124 lines)
@@ -0,0 +1,124 @@
from __future__ import annotations

from io import BytesIO

import pandas as pd
import pytest
from fastapi.testclient import TestClient

from models import (
    MiningOperationType,
    Project,
    Scenario,
    ScenarioStatus,
)
from models.import_export_log import ImportExportLog


@pytest.fixture()
def project_seed(unit_of_work_factory):
    with unit_of_work_factory() as uow:
        assert uow.projects is not None
        project = Project(name="Seed Project", operation_type=MiningOperationType.OPEN_PIT)
        uow.projects.create(project)
    yield project


def test_project_import_preview_and_commit(client: TestClient, unit_of_work_factory) -> None:
    csv_content = (
        "name,location,operation_type\n"
        "Project Import A,Chile,open pit\n"
        "Project Import B,Canada,underground\n"
    )
    files = {"file": ("projects.csv", csv_content, "text/csv")}

    preview_response = client.post("/imports/projects/preview", files=files)
    assert preview_response.status_code == 200
    preview_payload = preview_response.json()
    assert preview_payload["summary"]["accepted"] == 2
    assert preview_payload["stage_token"]

    token = preview_payload["stage_token"]

    commit_response = client.post("/imports/projects/commit", json={"token": token})
    assert commit_response.status_code == 200
    commit_payload = commit_response.json()
    assert commit_payload["summary"]["created"] == 2

    with unit_of_work_factory() as uow:
        assert uow.projects is not None
        names = {project.name for project in uow.projects.list()}
        assert {"Project Import A", "Project Import B"}.issubset(names)
        # ensure audit logs recorded preview and commit events
        assert uow.session is not None
        logs = (
            uow.session.query(ImportExportLog)
            .filter(ImportExportLog.dataset == "projects")
            .order_by(ImportExportLog.created_at)
            .all()
        )
        actions = [log.action for log in logs]
        assert "preview" in actions
        assert "commit" in actions


def test_scenario_import_preview_and_commit(client: TestClient, unit_of_work_factory, project_seed) -> None:
    csv_content = (
        "project_name,name,status\n"
        "Seed Project,Scenario Import A,Draft\n"
        "Seed Project,Scenario Import B,Active\n"
    )
    files = {"file": ("scenarios.csv", csv_content, "text/csv")}

    preview_response = client.post("/imports/scenarios/preview", files=files)
    assert preview_response.status_code == 200
    preview_payload = preview_response.json()
    assert preview_payload["summary"]["accepted"] == 2
    token = preview_payload["stage_token"]

    commit_response = client.post("/imports/scenarios/commit", json={"token": token})
    assert commit_response.status_code == 200
    commit_payload = commit_response.json()
    assert commit_payload["summary"]["created"] == 2

    with unit_of_work_factory() as uow:
        assert uow.projects is not None and uow.scenarios is not None
        project = uow.projects.list()[0]
        scenarios = uow.scenarios.list_for_project(project.id)
        names = {scenario.name for scenario in scenarios}
        assert {"Scenario Import A", "Scenario Import B"}.issubset(names)
        assert uow.session is not None
        logs = (
            uow.session.query(ImportExportLog)
            .filter(ImportExportLog.dataset == "scenarios")
            .order_by(ImportExportLog.created_at)
            .all()
        )
        actions = [log.action for log in logs]
        assert "preview" in actions
        assert "commit" in actions


def test_project_export_endpoint(client: TestClient, unit_of_work_factory) -> None:
    with unit_of_work_factory() as uow:
        assert uow.projects is not None
        uow.projects.create(Project(name="Export Project", operation_type=MiningOperationType.OPEN_PIT))

    response = client.post("/exports/projects", json={"format": "csv"})
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith("text/csv")
    assert "attachment; filename=" in response.headers["Content-Disposition"]
    body = response.content.decode("utf-8")
    assert "Export Project" in body

    with unit_of_work_factory() as uow:
        assert uow.session is not None
        log = (
            uow.session.query(ImportExportLog)
            .filter(ImportExportLog.dataset == "projects", ImportExportLog.action == "export")
            .order_by(ImportExportLog.created_at.desc())
            .first()
        )
        assert log is not None
        assert log.status == "success"
        assert log.row_count >= 1
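The integration tests above pin down the audit table's public shape: each ImportExportLog row carries a dataset, an action ("preview", "commit", or "export"), a status, a row_count, and a created_at column used for ordering. A minimal SQLAlchemy sketch consistent with those assertions follows; the table name, column types, and the Base import are assumptions, since the model itself is not shown in this diff.

# Hypothetical reconstruction of models/import_export_log.py; only the column
# names are confirmed by the tests above, everything else is assumed.
from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, Integer, String

from models.base import Base  # assumed location of the declarative base


class ImportExportLog(Base):
    __tablename__ = "import_export_logs"  # assumed table name

    id = Column(Integer, primary_key=True)
    dataset = Column(String(64), nullable=False)   # "projects" or "scenarios"
    action = Column(String(32), nullable=False)    # "preview" | "commit" | "export"
    status = Column(String(32), nullable=False)    # the export test asserts "success"
    row_count = Column(Integer, nullable=False, default=0)
    created_at = Column(DateTime, nullable=False, default=lambda: datetime.now(timezone.utc))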
@@ -1,12 +1,14 @@
from __future__ import annotations

from io import BytesIO
from textwrap import dedent

import pandas as pd
import pytest

from services.importers import ImportResult, load_project_imports, load_scenario_imports
from schemas.imports import ProjectImportRow, ScenarioImportRow
from models.project import MiningOperationType


def test_load_project_imports_from_csv() -> None:
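The unit tests below touch only the public surface of ImportResult: a rows list of validated entries and an errors list whose items expose row_number, field, and message. A dataclass sketch matching that contract, with names inferred from the assertions rather than taken from services.importers itself:

from __future__ import annotations

# "field" is aliased because the error rows expose an attribute literally named field.
from dataclasses import dataclass, field as dc_field
from typing import Optional


@dataclass
class ImportRowError:  # hypothetical name; the tests only read its attributes
    row_number: int        # 1-based with the header as row 1, so the first data row is 2
    field: Optional[str]   # None for row-level problems such as reversed dates
    message: str           # e.g. contains "required" when a name is missing


@dataclass
class ImportResult:
    rows: list = dc_field(default_factory=list)    # validated ProjectImportRow/ScenarioImportRow entries
    errors: list = dc_field(default_factory=list)  # ImportRowError entries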
@@ -76,3 +78,65 @@ def test_import_errors_include_row_numbers() -> None:
    assert error.row_number == 2
    assert error.field == "name"
    assert "required" in error.message


def test_project_import_handles_missing_columns() -> None:
    csv_content = "name\nProject Only\n"
    stream = BytesIO(csv_content.encode("utf-8"))

    result = load_project_imports(stream, "projects.csv")

    assert result.rows == []
    assert len(result.errors) == 1
    error = result.errors[0]
    assert error.row_number == 2
    assert error.field == "operation_type"


def test_project_import_rejects_invalid_operation_type() -> None:
    csv_content = "name,operation_type\nProject X,unknown\n"
    stream = BytesIO(csv_content.encode("utf-8"))

    result = load_project_imports(stream, "projects.csv")

    assert len(result.rows) == 0
    assert len(result.errors) == 1
    error = result.errors[0]
    assert error.row_number == 2
    assert error.field == "operation_type"


def test_scenario_import_flags_invalid_dates() -> None:
    csv_content = dedent(
        """
        project_name,name,status,start_date,end_date
        Project A,Scenario Reverse,Draft,2025-12-31,2025-01-01
        """
    ).strip()
    stream = BytesIO(csv_content.encode("utf-8"))

    result = load_scenario_imports(stream, "scenarios.csv")

    assert len(result.rows) == 0
    assert len(result.errors) == 1
    error = result.errors[0]
    assert error.row_number == 2
    assert error.field is None


def test_scenario_import_handles_large_dataset() -> None:
    buffer = BytesIO()
    df = pd.DataFrame(
        {
            "project_name": ["Project"] * 500,
            "name": [f"Scenario {i}" for i in range(500)],
            "status": ["draft"] * 500,
        }
    )
    df.to_csv(buffer, index=False)
    buffer.seek(0)

    result = load_scenario_imports(buffer, "bulk.csv")
    assert len(result.rows) == 500
    assert len(result.errors) == 0
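The commit title also promises Prometheus metrics, which none of these test hunks exercise. Instrumentation of roughly the following shape would line up with the fields the audit log records; the metric name, the label set, and the record_operation helper are all hypothetical, using only the standard prometheus_client API.

from prometheus_client import Counter

# Hypothetical metric; nothing in this diff confirms the name or labels.
IMPORT_EXPORT_OPERATIONS = Counter(
    "import_export_operations_total",
    "Import/export operations by dataset, action, and status.",
    ["dataset", "action", "status"],
)


def record_operation(dataset: str, action: str, status: str) -> None:
    # Increment alongside the ImportExportLog insert so the counter and the
    # persistent audit trail stay in step.
    IMPORT_EXPORT_OPERATIONS.labels(dataset=dataset, action=action, status=status).inc()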