calminer/routes/exports.py

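"""Export routes: download project and scenario data as CSV or Excel, with audit logging and metrics."""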
from __future__ import annotations
import logging
import time
from datetime import datetime, timezone
from typing import Annotated
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from fastapi.responses import HTMLResponse, StreamingResponse
from fastapi.templating import Jinja2Templates
from dependencies import get_unit_of_work, require_any_role
from schemas.exports import (
    ExportFormat,
    ProjectExportRequest,
    ScenarioExportRequest,
)
from services.export_serializers import (
    export_projects_to_excel,
    export_scenarios_to_excel,
    stream_projects_to_csv,
    stream_scenarios_to_csv,
)
from services.unit_of_work import UnitOfWork
from models.import_export_log import ImportExportLog
from monitoring.metrics import observe_export

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/exports", tags=["exports"])


@router.get(
    "/modal/{dataset}",
    response_model=None,
    response_class=HTMLResponse,
    include_in_schema=False,
    name="exports.modal",
)
async def export_modal(
    dataset: str,
    request: Request,
) -> HTMLResponse:
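    """Render the export modal for a dataset ("projects" or "scenarios")."""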
    dataset = dataset.lower()
    if dataset not in {"projects", "scenarios"}:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="Unknown dataset")
    submit_url = request.url_for(
        "export_projects" if dataset == "projects" else "export_scenarios"
    )
    templates = Jinja2Templates(directory="templates")
    return templates.TemplateResponse(
        request,
        "exports/modal.html",
        {
            "dataset": dataset,
            "submit_url": submit_url,
        },
    )


def _timestamp_suffix() -> str:
    """Return a UTC timestamp used as a filename suffix."""
    return datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")


def _ensure_repository(repo, name: str):
    """Return the repository, or raise HTTP 500 if the unit of work did not provide it."""
    if repo is None:
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                            detail=f"{name} repository unavailable")
    return repo


def _record_export_audit(
    *,
    uow: UnitOfWork,
    dataset: str,
    status: str,
    export_format: ExportFormat,
    row_count: int,
    filename: str | None,
) -> None:
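    """Record an ImportExportLog entry for the export; audit failures are logged and never propagate."""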
    try:
        if uow.session is None:
            return
        log = ImportExportLog(
            action="export",
            dataset=dataset,
            status=status,
            filename=filename,
            row_count=row_count,
            detail=f"format={export_format.value}",
        )
        uow.session.add(log)
        uow.commit()
    except Exception:
        # Best-effort auditing: do not break exports.
        if uow.session is not None:
            uow.session.rollback()
        logger.exception(
            "export.audit.failed",
            extra={
                "event": "export.audit",
                "dataset": dataset,
                "status": status,
                "format": export_format.value,
            },
        )


@router.post(
    "/projects",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role(
        "admin", "project_manager", "analyst"))],
)
async def export_projects(
    request: ProjectExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
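    """Export filtered projects as a CSV stream or Excel workbook, recording audit and metrics data."""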
    project_repo = _ensure_repository(
        getattr(uow, "projects", None), "Project")
    try:
        projects = project_repo.filtered_for_export(request.filters)
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "failure",
                "format": request.format.value,
            },
        )
        raise
    filename = f"projects-{_timestamp_suffix()}"
    start = time.perf_counter()
    if request.format == ExportFormat.CSV:
        # CSV export: stream the serialized rows as the response body.
        stream = stream_projects_to_csv(projects)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = f"attachment; filename={filename}.csv"
        _record_export_audit(
            uow=uow,
            dataset="projects",
            status="success",
            export_format=request.format,
            row_count=len(projects),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "projects",
                "status": "success",
                "format": request.format.value,
                "row_count": len(projects),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="projects",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response
    # Excel export: build the workbook in memory and return it as a single attachment.
    data = export_projects_to_excel(projects)
    _record_export_audit(
        uow=uow,
        dataset="projects",
        status="success",
        export_format=request.format,
        row_count=len(projects),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "projects",
            "status": "success",
            "format": request.format.value,
            "row_count": len(projects),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="projects",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )


@router.post(
    "/scenarios",
    status_code=status.HTTP_200_OK,
    response_class=StreamingResponse,
    dependencies=[Depends(require_any_role(
        "admin", "project_manager", "analyst"))],
)
async def export_scenarios(
    request: ScenarioExportRequest,
    uow: Annotated[UnitOfWork, Depends(get_unit_of_work)],
) -> Response:
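    """Export filtered scenarios (with project context) as a CSV stream or Excel workbook, recording audit and metrics data."""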
    scenario_repo = _ensure_repository(
        getattr(uow, "scenarios", None), "Scenario")
    try:
        scenarios = scenario_repo.filtered_for_export(
            request.filters, include_project=True)
    except Exception:
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="failure",
            export_format=request.format,
            row_count=0,
            filename=None,
        )
        logger.exception(
            "export.failed",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "failure",
                "format": request.format.value,
            },
        )
        raise
    filename = f"scenarios-{_timestamp_suffix()}"
    start = time.perf_counter()
    if request.format == ExportFormat.CSV:
        # CSV export: stream the serialized rows as the response body.
        stream = stream_scenarios_to_csv(scenarios)
        response = StreamingResponse(stream, media_type="text/csv")
        response.headers["Content-Disposition"] = f"attachment; filename={filename}.csv"
        _record_export_audit(
            uow=uow,
            dataset="scenarios",
            status="success",
            export_format=request.format,
            row_count=len(scenarios),
            filename=f"{filename}.csv",
        )
        logger.info(
            "export",
            extra={
                "event": "export",
                "dataset": "scenarios",
                "status": "success",
                "format": request.format.value,
                "row_count": len(scenarios),
                "filename": f"{filename}.csv",
            },
        )
        observe_export(
            dataset="scenarios",
            status="success",
            export_format=request.format.value,
            seconds=time.perf_counter() - start,
        )
        return response
    # Excel export: build the workbook in memory and return it as a single attachment.
    data = export_scenarios_to_excel(scenarios)
    _record_export_audit(
        uow=uow,
        dataset="scenarios",
        status="success",
        export_format=request.format,
        row_count=len(scenarios),
        filename=f"{filename}.xlsx",
    )
    logger.info(
        "export",
        extra={
            "event": "export",
            "dataset": "scenarios",
            "status": "success",
            "format": request.format.value,
            "row_count": len(scenarios),
            "filename": f"{filename}.xlsx",
        },
    )
    observe_export(
        dataset="scenarios",
        status="success",
        export_format=request.format.value,
        seconds=time.perf_counter() - start,
    )
    return StreamingResponse(
        iter([data]),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition": f"attachment; filename={filename}.xlsx",
        },
    )