feat: implement persistent audit logging for import/export operations with Prometheus metrics
This commit is contained in:
@@ -1,12 +1,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from io import BytesIO
|
||||
from textwrap import dedent
|
||||
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from services.importers import ImportResult, load_project_imports, load_scenario_imports
|
||||
from schemas.imports import ProjectImportRow, ScenarioImportRow
|
||||
from models.project import MiningOperationType
|
||||
|
||||
|
||||
def test_load_project_imports_from_csv() -> None:
|
||||
@@ -76,3 +78,65 @@ def test_import_errors_include_row_numbers() -> None:
|
||||
assert error.row_number == 2
|
||||
assert error.field == "name"
|
||||
assert "required" in error.message
|
||||
|
||||
|
||||
def test_project_import_handles_missing_columns() -> None:
    """A CSV missing the operation_type column yields one error and no rows."""
    payload = BytesIO(b"name\nProject Only\n")

    result = load_project_imports(payload, "projects.csv")

    assert result.rows == []
    assert len(result.errors) == 1
    first_error = result.errors[0]
    # Row numbering is 1-based and counts the header, so the first data row is 2.
    assert first_error.row_number == 2
    assert first_error.field == "operation_type"
|
||||
|
||||
|
||||
def test_project_import_rejects_invalid_operation_type() -> None:
    """An unrecognized operation_type value is rejected with a field-level error."""
    payload = BytesIO(b"name,operation_type\nProject X,unknown\n")

    result = load_project_imports(payload, "projects.csv")

    assert len(result.rows) == 0
    assert len(result.errors) == 1
    first_error = result.errors[0]
    # The offending data row is row 2 (row 1 is the header).
    assert first_error.row_number == 2
    assert first_error.field == "operation_type"
|
||||
|
||||
|
||||
def test_scenario_import_flags_invalid_dates() -> None:
    """A scenario whose start_date falls after its end_date is rejected.

    The error is row-level (field is None) rather than attached to a single
    column, since the problem involves the relationship between two fields.
    """
    csv_text = (
        "project_name,name,status,start_date,end_date\n"
        "Project A,Scenario Reverse,Draft,2025-12-31,2025-01-01"
    )
    payload = BytesIO(csv_text.encode("utf-8"))

    result = load_scenario_imports(payload, "scenarios.csv")

    assert len(result.rows) == 0
    assert len(result.errors) == 1
    row_error = result.errors[0]
    assert row_error.row_number == 2
    assert row_error.field is None
|
||||
|
||||
|
||||
def test_scenario_import_handles_large_dataset() -> None:
    """Importing 500 well-formed scenario rows succeeds in full.

    Builds the CSV in memory via pandas and round-trips it through the
    importer, verifying every row is accepted.
    """
    buffer = BytesIO()
    df = pd.DataFrame(
        {
            "project_name": ["Project"] * 500,
            "name": [f"Scenario {i}" for i in range(500)],
            "status": ["draft"] * 500,
        }
    )
    df.to_csv(buffer, index=False)
    buffer.seek(0)  # rewind so the importer reads from the start

    result = load_scenario_imports(buffer, "bulk.csv")

    assert len(result.rows) == 500
    # Fixed copy-paste duplicate: the original repeated the rows assertion;
    # the second check should confirm the happy path produced no errors.
    assert len(result.errors) == 0
|
||||
|
||||
Reference in New Issue
Block a user