v1
Some checks failed
CI / test (3.11) (push) Failing after 5m36s
CI / build-image (push) Has been skipped

commit 4cefd4e3ab
Date: 2025-10-22 16:48:55 +02:00
53 changed files with 5837 additions and 0 deletions

server/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Server application package."""
from .factory import create_app
__all__ = ["create_app"]

server/app.py Normal file

@@ -0,0 +1,35 @@
"""Compatibility layer exposing the Flask app instance."""
from __future__ import annotations
from pathlib import Path
from .database import (
DB_PATH as _DB_PATH,
DEFAULT_DB_PATH,
db_cursor,
init_db as _init_db,
is_postgres_enabled,
set_db_path,
set_postgres_override,
)
from .factory import create_app
app = create_app()
DB_PATH: Path = _DB_PATH
def init_db() -> None:
"""Initialise the database using the current DB_PATH."""
set_db_path(DB_PATH)
_init_db()
__all__ = [
"app",
"DB_PATH",
"DEFAULT_DB_PATH",
"db_cursor",
"init_db",
"is_postgres_enabled",
"set_postgres_override",
]

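The compatibility module keeps the historical `server.app:app` entry point and re-exports the DB helpers, with the module-level DB_PATH intended mainly for tests. A minimal sketch of how a test might point it at a scratch SQLite file (assuming pytest and its tmp_path fixture; test name is hypothetical):

    # Hypothetical pytest sketch: redirect the compatibility layer to a scratch DB.
    from server import app as app_module

    def test_health_endpoint(tmp_path):
        app_module.DB_PATH = tmp_path / "forms.db"  # module-level path read by init_db()
        app_module.init_db()                        # recreates the tables in the scratch file
        client = app_module.app.test_client()
        assert client.get("/health").status_code == 200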
server/auth.py Normal file

@@ -0,0 +1,16 @@
"""Authentication utilities."""
from __future__ import annotations
from functools import wraps
from flask import redirect, session, url_for
def login_required(f):
"""Decorator to require login for routes."""
@wraps(f)
def decorated_function(*args, **kwargs):
if not session.get("logged_in"):
return redirect(url_for("auth.login"))
return f(*args, **kwargs)
return decorated_function

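login_required only checks session["logged_in"] and otherwise redirects to the auth blueprint's login view. A usage sketch guarding a hypothetical route (the "reports" blueprint below is illustrative, not part of this commit):

    # Sketch: protecting a hypothetical route with the decorator above.
    from flask import Blueprint
    from server.auth import login_required

    bp = Blueprint("reports", __name__)

    @bp.route("/reports")
    @login_required  # redirects to auth.login unless session["logged_in"] is set
    def reports():
        return "only visible after login"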
server/database.py Normal file

@@ -0,0 +1,691 @@
"""Database helpers supporting SQLite and optional Postgres."""
from __future__ import annotations
import logging
import sqlite3
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, Any, Iterator, Tuple
from . import settings
try: # psycopg2 is optional
import psycopg2
except Exception: # pragma: no cover
psycopg2 = None # type: ignore
if TYPE_CHECKING: # pragma: no cover
from .services.contact import ContactSubmission
DB_PATH = Path(settings.SQLITE_DB_PATH)
DB_PATH.parent.mkdir(parents=True, exist_ok=True)
DEFAULT_DB_PATH = DB_PATH
_USE_POSTGRES_OVERRIDE: bool | None = None
# Keep legacy-style flag available for external access.
USE_POSTGRES = False
def set_db_path(new_path: Path | str) -> None:
"""Update the SQLite database path (used primarily in tests)."""
global DB_PATH
DB_PATH = Path(new_path)
DB_PATH.parent.mkdir(parents=True, exist_ok=True)
def set_postgres_override(value: bool | None) -> None:
"""Allow callers to force-enable or disable Postgres usage."""
global _USE_POSTGRES_OVERRIDE
_USE_POSTGRES_OVERRIDE = value
def is_postgres_enabled() -> bool:
"""Return True when Postgres should be used for database operations."""
if _USE_POSTGRES_OVERRIDE is not None:
use_pg = _USE_POSTGRES_OVERRIDE
elif psycopg2 is None or not settings.POSTGRES_URL:
use_pg = False
else:
use_pg = DB_PATH == DEFAULT_DB_PATH
globals()["USE_POSTGRES"] = use_pg
return use_pg
@contextmanager
def db_cursor(*, read_only: bool = False) -> Iterator[Tuple[Any, Any]]:
"""Yield a database cursor for either SQLite or Postgres."""
use_pg = is_postgres_enabled()
if use_pg:
if psycopg2 is None:
raise RuntimeError(
"Postgres requested but psycopg2 is unavailable")
conn = psycopg2.connect(settings.POSTGRES_URL)
else:
DB_PATH.parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(str(DB_PATH))
try:
cur = conn.cursor()
try:
yield conn, cur
if use_pg:
if read_only:
conn.rollback()
else:
conn.commit()
elif not read_only:
conn.commit()
except Exception:
try:
conn.rollback()
except Exception:
pass
raise
finally:
try:
cur.close()
except Exception:
pass
finally:
conn.close()
def init_db() -> None:
"""Create the required tables if they do not exist."""
if is_postgres_enabled():
with db_cursor() as (_, cur):
cur.execute(
"""
CREATE TABLE IF NOT EXISTS contact (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL,
email TEXT NOT NULL,
company TEXT,
message TEXT NOT NULL,
timeline TEXT,
created_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS subscribers (
email TEXT PRIMARY KEY,
subscribed_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS app_settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS newsletters (
id SERIAL PRIMARY KEY,
subject TEXT NOT NULL,
content TEXT NOT NULL,
sender_name TEXT,
send_date TEXT,
status TEXT NOT NULL DEFAULT 'draft',
created_at TEXT NOT NULL,
sent_at TEXT
)
"""
)
else:
with db_cursor() as (_, cur):
cur.execute(
"""
CREATE TABLE IF NOT EXISTS contact (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
email TEXT NOT NULL,
company TEXT,
message TEXT NOT NULL,
timeline TEXT,
created_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS subscribers (
email TEXT PRIMARY KEY,
subscribed_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS app_settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS newsletters (
id INTEGER PRIMARY KEY AUTOINCREMENT,
subject TEXT NOT NULL,
content TEXT NOT NULL,
sender_name TEXT,
send_date TEXT,
status TEXT NOT NULL DEFAULT 'draft',
created_at TEXT NOT NULL,
sent_at TEXT
)
"""
)
def save_contact(submission: "ContactSubmission") -> int:
"""Persist a contact submission and return its identifier."""
record_id = 0
use_pg = is_postgres_enabled()
with db_cursor() as (_, cur):
if use_pg:
cur.execute(
"INSERT INTO contact (name, email, company, message, timeline, created_at) VALUES (%s, %s, %s, %s, %s, %s) RETURNING id",
(
submission.name,
submission.email,
submission.company,
submission.message,
submission.timeline,
submission.created_at,
),
)
row = cur.fetchone()
if row:
record_id = int(row[0])
else:
cur.execute(
"INSERT INTO contact (name, email, company, message, timeline, created_at) VALUES (?, ?, ?, ?, ?, ?)",
(
submission.name,
submission.email,
submission.company,
submission.message,
submission.timeline,
submission.created_at,
),
)
record_id = int(cur.lastrowid or 0)
return record_id
def save_subscriber(email: str, *, created_at: str) -> bool:
"""Persist a newsletter subscriber. Returns False on duplicate entries."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute(
"INSERT INTO subscribers (email, subscribed_at) VALUES (%s, %s)",
(email, created_at),
)
else:
cur.execute(
"INSERT INTO subscribers (email, subscribed_at) VALUES (?, ?)",
(email, created_at),
)
return True
except sqlite3.IntegrityError:
return False
except Exception as exc:
if use_pg and psycopg2 is not None and isinstance(exc, psycopg2.IntegrityError):
return False
raise
def delete_subscriber(email: str) -> bool:
"""Remove a newsletter subscriber. Returns True if deleted, False if not found."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute(
"DELETE FROM subscribers WHERE email = %s", (email,))
else:
cur.execute(
"DELETE FROM subscribers WHERE email = ?", (email,))
return cur.rowcount > 0
except Exception as exc:
logging.exception("Failed to delete subscriber: %s", exc)
raise
def update_subscriber(old_email: str, new_email: str) -> bool:
"""Update a subscriber's email. Returns True if updated, False if old_email not found or new_email exists."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
# Check if old_email exists and new_email doesn't
if use_pg:
cur.execute(
"SELECT 1 FROM subscribers WHERE email = %s", (old_email,))
if not cur.fetchone():
return False
cur.execute(
"SELECT 1 FROM subscribers WHERE email = %s", (new_email,))
if cur.fetchone():
return False
cur.execute(
"UPDATE subscribers SET email = %s WHERE email = %s", (new_email, old_email))
else:
cur.execute(
"SELECT 1 FROM subscribers WHERE email = ?", (old_email,))
if not cur.fetchone():
return False
cur.execute(
"SELECT 1 FROM subscribers WHERE email = ?", (new_email,))
if cur.fetchone():
return False
cur.execute(
"UPDATE subscribers SET email = ? WHERE email = ?", (new_email, old_email))
return cur.rowcount > 0
except Exception as exc:
logging.exception("Failed to update subscriber: %s", exc)
raise
def get_contacts(
page: int = 1,
per_page: int = 50,
sort_by: str = "created_at",
sort_order: str = "desc",
email_filter: str | None = None,
date_from: str | None = None,
date_to: str | None = None,
) -> Tuple[list[dict], int]:
"""Retrieve contact submissions with pagination, filtering, and sorting."""
use_pg = is_postgres_enabled()
offset = (page - 1) * per_page
# Build WHERE clause
where_conditions = []
params = []
if email_filter:
where_conditions.append("email LIKE ?")
params.append(f"%{email_filter}%")
if date_from:
where_conditions.append("created_at >= ?")
params.append(date_from)
if date_to:
where_conditions.append("created_at <= ?")
params.append(date_to)
where_clause = "WHERE " + \
" AND ".join(where_conditions) if where_conditions else ""
# Build ORDER BY clause
valid_sort_fields = {"id", "name", "email", "created_at"}
if sort_by not in valid_sort_fields:
sort_by = "created_at"
sort_order = "DESC" if sort_order.lower() == "desc" else "ASC"
order_clause = f"ORDER BY {sort_by} {sort_order}"
# Get total count
count_query = f"SELECT COUNT(*) FROM contact {where_clause}"
with db_cursor(read_only=True) as (_, cur):
if use_pg:
# Convert ? to %s for PostgreSQL
count_query = count_query.replace("?", "%s")
cur.execute(count_query, params)
total = cur.fetchone()[0]
# Get paginated results
select_query = f"""
SELECT id, name, email, company, message, timeline, created_at
FROM contact {where_clause} {order_clause}
LIMIT ? OFFSET ?
"""
params.extend([per_page, offset])
contacts = []
with db_cursor(read_only=True) as (_, cur):
if use_pg:
# Convert ? placeholders to %s for PostgreSQL
select_query = select_query.replace("?", "%s")
cur.execute(select_query, params)
rows = cur.fetchall()
for row in rows:
contacts.append({
"id": row[0],
"name": row[1],
"email": row[2],
"company": row[3],
"message": row[4],
"timeline": row[5],
"created_at": row[6],
})
return contacts, total
def get_subscribers(
page: int = 1,
per_page: int = 50,
sort_by: str = "subscribed_at",
sort_order: str = "desc",
email_filter: str | None = None,
date_from: str | None = None,
date_to: str | None = None,
) -> Tuple[list[dict], int]:
"""Retrieve newsletter subscribers with pagination, filtering, and sorting."""
use_pg = is_postgres_enabled()
offset = (page - 1) * per_page
# Build WHERE clause
where_conditions = []
params = []
if email_filter:
where_conditions.append("email LIKE ?")
params.append(f"%{email_filter}%")
if date_from:
where_conditions.append("subscribed_at >= ?")
params.append(date_from)
if date_to:
where_conditions.append("subscribed_at <= ?")
params.append(date_to)
where_clause = "WHERE " + \
" AND ".join(where_conditions) if where_conditions else ""
# Build ORDER BY clause
valid_sort_fields = {"email", "subscribed_at"}
if sort_by not in valid_sort_fields:
sort_by = "subscribed_at"
sort_order = "DESC" if sort_order.lower() == "desc" else "ASC"
order_clause = f"ORDER BY {sort_by} {sort_order}"
# Get total count
count_query = f"SELECT COUNT(*) FROM subscribers {where_clause}"
with db_cursor(read_only=True) as (_, cur):
if use_pg:
# Convert ? to %s for PostgreSQL
count_query = count_query.replace("?", "%s")
cur.execute(count_query, params)
total = cur.fetchone()[0]
# Get paginated results
select_query = f"""
SELECT email, subscribed_at
FROM subscribers {where_clause} {order_clause}
LIMIT ? OFFSET ?
"""
params.extend([per_page, offset])
subscribers = []
with db_cursor(read_only=True) as (_, cur):
if use_pg:
# Convert ? placeholders to %s for PostgreSQL
select_query = select_query.replace("?", "%s")
cur.execute(select_query, params)
rows = cur.fetchall()
for row in rows:
subscribers.append({
"email": row[0],
"subscribed_at": row[1],
})
return subscribers, total
def delete_contact(contact_id: int) -> bool:
"""Delete a contact submission by ID. Returns True if deleted."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute("DELETE FROM contact WHERE id = %s", (contact_id,))
else:
cur.execute("DELETE FROM contact WHERE id = ?", (contact_id,))
return cur.rowcount > 0
except Exception as exc:
logging.exception("Failed to delete contact: %s", exc)
raise
def get_app_settings() -> dict[str, str]:
"""Retrieve all application settings as a dictionary."""
settings_dict = {}
with db_cursor(read_only=True) as (_, cur):
cur.execute("SELECT key, value FROM app_settings ORDER BY key")
rows = cur.fetchall()
for row in rows:
settings_dict[row[0]] = row[1]
return settings_dict
def update_app_setting(key: str, value: str) -> bool:
"""Update or insert an application setting. Returns True on success."""
from datetime import datetime, timezone
updated_at = datetime.now(timezone.utc).isoformat()
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute(
"""
INSERT INTO app_settings (key, value, updated_at)
VALUES (%s, %s, %s)
ON CONFLICT (key) DO UPDATE SET
value = EXCLUDED.value,
updated_at = EXCLUDED.updated_at
""",
(key, value, updated_at),
)
else:
cur.execute(
"""
INSERT OR REPLACE INTO app_settings (key, value, updated_at)
VALUES (?, ?, ?)
""",
(key, value, updated_at),
)
return True
except Exception as exc:
logging.exception("Failed to update app setting: %s", exc)
raise
def delete_app_setting(key: str) -> bool:
"""Delete an application setting. Returns True if deleted."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute("DELETE FROM app_settings WHERE key = %s", (key,))
else:
cur.execute("DELETE FROM app_settings WHERE key = ?", (key,))
return cur.rowcount > 0
except Exception as exc:
logging.exception("Failed to delete app setting: %s", exc)
raise
def save_newsletter(subject: str, content: str, sender_name: str | None = None, send_date: str | None = None, status: str = "draft") -> int:
"""Save a newsletter and return its ID."""
from datetime import datetime, timezone
created_at = datetime.now(timezone.utc).isoformat()
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if use_pg:
cur.execute(
"""
INSERT INTO newsletters (subject, content, sender_name, send_date, status, created_at)
VALUES (%s, %s, %s, %s, %s, %s) RETURNING id
""",
(subject, content, sender_name, send_date, status, created_at),
)
newsletter_id = cur.fetchone()[0]
else:
cur.execute(
"""
INSERT INTO newsletters (subject, content, sender_name, send_date, status, created_at)
VALUES (?, ?, ?, ?, ?, ?)
""",
(subject, content, sender_name, send_date, status, created_at),
)
newsletter_id = cur.lastrowid
return newsletter_id
except Exception as exc:
logging.exception("Failed to save newsletter: %s", exc)
raise
def get_newsletters(page: int = 1, per_page: int = 20, status_filter: str | None = None) -> tuple[list[dict], int]:
"""Get newsletters with pagination and optional status filtering."""
use_pg = is_postgres_enabled()
newsletters = []
total = 0
offset = (page - 1) * per_page
try:
with db_cursor(read_only=True) as (_, cur):
# Get total count
count_query = "SELECT COUNT(*) FROM newsletters"
count_params = []
if status_filter:
count_query += " WHERE status = %s" if use_pg else " WHERE status = ?"
count_params.append(status_filter)
cur.execute(count_query, count_params)
total = cur.fetchone()[0]
# Get newsletters
select_query = """
SELECT id, subject, sender_name, send_date, status, created_at, sent_at
FROM newsletters
"""
params = []
if status_filter:
select_query += " WHERE status = %s" if use_pg else " WHERE status = ?"
params.append(status_filter)
select_query += " ORDER BY created_at DESC"
select_query += " LIMIT %s OFFSET %s" if use_pg else " LIMIT ? OFFSET ?"
params.extend([per_page, offset])
cur.execute(select_query, params)
rows = cur.fetchall()
for row in rows:
newsletters.append({
"id": row[0],
"subject": row[1],
"sender_name": row[2],
"send_date": row[3],
"status": row[4],
"created_at": row[5],
"sent_at": row[6],
})
except Exception as exc:
logging.exception("Failed to get newsletters: %s", exc)
raise
return newsletters, total
def update_newsletter_status(newsletter_id: int, status: str, sent_at: str | None = None) -> bool:
"""Update newsletter status and optionally sent_at timestamp."""
use_pg = is_postgres_enabled()
try:
with db_cursor() as (_, cur):
if sent_at:
if use_pg:
cur.execute(
"UPDATE newsletters SET status = %s, sent_at = %s WHERE id = %s",
(status, sent_at, newsletter_id),
)
else:
cur.execute(
"UPDATE newsletters SET status = ?, sent_at = ? WHERE id = ?",
(status, sent_at, newsletter_id),
)
else:
if use_pg:
cur.execute(
"UPDATE newsletters SET status = %s WHERE id = %s",
(status, newsletter_id),
)
else:
cur.execute(
"UPDATE newsletters SET status = ? WHERE id = ?",
(status, newsletter_id),
)
return cur.rowcount > 0
except Exception as exc:
logging.exception("Failed to update newsletter status: %s", exc)
raise
def get_newsletter_by_id(newsletter_id: int) -> dict | None:
"""Get a specific newsletter by ID."""
use_pg = is_postgres_enabled()
try:
with db_cursor(read_only=True) as (_, cur):
if use_pg:
cur.execute(
"SELECT id, subject, content, sender_name, send_date, status, created_at, sent_at FROM newsletters WHERE id = %s",
(newsletter_id,),
)
else:
cur.execute(
"SELECT id, subject, content, sender_name, send_date, status, created_at, sent_at FROM newsletters WHERE id = ?",
(newsletter_id,),
)
row = cur.fetchone()
if row:
return {
"id": row[0],
"subject": row[1],
"content": row[2],
"sender_name": row[3],
"send_date": row[4],
"status": row[5],
"created_at": row[6],
"sent_at": row[7],
}
except Exception as exc:
logging.exception("Failed to get newsletter by ID: %s", exc)
raise
return None

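db_cursor yields a (connection, cursor) pair and handles commit/rollback and cleanup itself; call sites only have to pick the placeholder style that matches is_postgres_enabled(). A sketch of a read-only query against the schema above:

    from server.database import db_cursor, init_db, is_postgres_enabled

    init_db()
    placeholder = "%s" if is_postgres_enabled() else "?"
    with db_cursor(read_only=True) as (_, cur):
        cur.execute(
            f"SELECT COUNT(*) FROM subscribers WHERE email LIKE {placeholder}",
            ("%@example.org",),
        )
        print(cur.fetchone()[0])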
server/factory.py Normal file

@@ -0,0 +1,56 @@
"""Application factory for the Flask server."""
from __future__ import annotations
import logging
from flask import Flask
from . import logging_config, middleware, routes, settings
from .database import init_db, is_postgres_enabled
def _configure_sentry() -> None:
if not settings.SENTRY_DSN:
return
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=settings.SENTRY_DSN,
integrations=[FlaskIntegration()],
traces_sample_rate=settings.SENTRY_TRACES_SAMPLE_RATE,
)
logging.info("Sentry initialized")
except Exception:
logging.exception("Failed to initialize Sentry SDK")
def create_app() -> Flask:
"""Create and configure the Flask application instance."""
logging_config.configure_logging()
if settings.POSTGRES_URL:
try:
import psycopg2 # type: ignore # noqa: F401
except Exception:
logging.warning(
"POSTGRES_URL is set but psycopg2 is not installed; falling back to SQLite"
)
app = Flask(__name__)
app.config.from_mapping(SECRET_KEY=settings.SECRET_KEY)
app.template_folder = str(settings.BASE_DIR / "templates")
middleware.register_request_hooks(app)
routes.register_blueprints(app)
try:
init_db()
except Exception:
logging.exception("Failed to initialize DB at import time")
is_postgres_enabled()
_configure_sentry()
return app

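Because create_app wires logging, request hooks, blueprints and DB setup, serving the app is only an import away. A local-development sketch (production would use something like `gunicorn "server.app:app"`, not this dev server):

    # Development-only sketch; port and host are illustrative.
    from server.factory import create_app

    app = create_app()

    if __name__ == "__main__":
        app.run(host="127.0.0.1", port=8000, debug=False)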
server/logging_config.py Normal file

@@ -0,0 +1,65 @@
"""Central logging configuration utilities."""
from __future__ import annotations
import importlib
import logging
from . import settings
JsonFormatter = None
try:
_json_module = importlib.import_module("pythonjsonlogger.json")
JsonFormatter = getattr(_json_module, "JsonFormatter", None)
except Exception:
try:
_json_module = importlib.import_module("pythonjsonlogger.jsonlogger")
JsonFormatter = getattr(_json_module, "JsonFormatter", None)
except Exception:
JsonFormatter = None
class RequestContextFilter(logging.Filter):
"""Inject request metadata into log records when a request context exists."""
def filter(self, record: logging.LogRecord) -> bool:
try:
from flask import has_request_context, request
if has_request_context():
rid = getattr(request, "request_id", None) or request.environ.get("HTTP_X_REQUEST_ID")
record.request_id = rid
record.remote_addr = request.remote_addr
record.path = request.path
record.method = request.method
else:
record.request_id = None
record.remote_addr = None
record.path = None
record.method = None
except Exception:
record.request_id = None
record.remote_addr = None
record.path = None
record.method = None
return True
def configure_logging() -> None:
"""Configure root logging handlers and optional JSON formatting."""
logging.basicConfig(
level=logging.INFO,
format="[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
)
if settings.ENABLE_JSON_LOGS and JsonFormatter is not None:
try:
handler = logging.getLogger().handlers[0]
handler.setFormatter(JsonFormatter("%(asctime)s %(levelname)s %(name)s %(message)s"))
except Exception:
logging.exception("Failed to initialize JSON log formatter")
try:
for handler in logging.getLogger().handlers:
handler.addFilter(RequestContextFilter())
except Exception:
pass

server/metrics.py Normal file

@@ -0,0 +1,86 @@
"""Metrics registry and helpers for Prometheus and JSON fallbacks."""
from __future__ import annotations
import logging
import time
from typing import Any, Dict, Tuple
try:
from prometheus_client import CollectorRegistry, Counter, Histogram, generate_latest, CONTENT_TYPE_LATEST
except Exception:
CollectorRegistry = None # type: ignore
Counter = None # type: ignore
Histogram = None # type: ignore
generate_latest = None # type: ignore
CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8"
_start_time = time.time()
_total_submissions = 0
_prom_registry = CollectorRegistry() if CollectorRegistry is not None else None
_prom_total_submissions = (
Counter("contact_total_submissions", "Total contact submissions", registry=_prom_registry)
if Counter is not None
else None
)
_prom_request_counter = None
_prom_request_latency = None
if Counter is not None and _prom_registry is not None:
try:
_prom_request_counter = Counter(
"http_requests_total",
"Total HTTP requests",
["method", "endpoint"],
registry=_prom_registry,
)
except Exception:
_prom_request_counter = None
if Histogram is not None and _prom_registry is not None:
try:
_prom_request_latency = Histogram(
"http_request_duration_seconds",
"Request duration",
["method", "endpoint"],
registry=_prom_registry,
)
except Exception:
_prom_request_latency = None
def record_submission() -> None:
"""Register a completed contact submission."""
global _total_submissions
_total_submissions += 1
if _prom_total_submissions is not None:
try:
_prom_total_submissions.inc()
except Exception:
logging.debug("Failed to increment Prometheus submission counter", exc_info=True)
def observe_request(method: str, endpoint: str, start_time: float | None, status: int | None = None) -> None:
"""Update request counters and latency histograms."""
if _prom_request_counter is not None:
try:
_prom_request_counter.labels(method=method, endpoint=endpoint).inc()
except Exception:
logging.debug("Failed to increment request counter", exc_info=True)
if _prom_request_latency is not None and start_time:
try:
_prom_request_latency.labels(method=method, endpoint=endpoint).observe(time.time() - start_time)
except Exception:
logging.debug("Failed to observe request latency", exc_info=True)
def export_metrics() -> Tuple[Any, int, Dict[str, str]]:
"""Return a Flask-style response tuple for the metrics endpoint."""
uptime = int(time.time() - _start_time)
if generate_latest is not None and _prom_registry is not None:
payload = generate_latest(_prom_registry)
headers = {"Content-Type": CONTENT_TYPE_LATEST}
return payload, 200, headers
body = {"uptime_seconds": uptime, "total_submissions": _total_submissions}
return body, 200, {"Content-Type": "application/json"}

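When prometheus_client is missing, export_metrics falls back to a small JSON body; either way the return value is a Flask-style (payload, status, headers) tuple. A quick sketch:

    from server import metrics

    metrics.record_submission()
    payload, status, headers = metrics.export_metrics()
    # With prometheus_client installed: payload is the Prometheus text exposition format.
    # Without it: payload is {"uptime_seconds": ..., "total_submissions": 1}.
    print(status, headers["Content-Type"])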
server/middleware.py Normal file

@@ -0,0 +1,68 @@
"""HTTP middleware helpers (Flask request hooks)."""
from __future__ import annotations
import logging
import time
from flask import Flask, g, request
from . import settings
from .metrics import observe_request
from .utils import generate_request_id
def register_request_hooks(app: Flask) -> None:
"""Attach before/after request handlers for logging and correlation."""
@app.before_request
def attach_request_id_and_log(): # type: ignore[unused-ignore]
rid = request.headers.get("X-Request-Id")
if not rid:
rid = generate_request_id()
request.environ["HTTP_X_REQUEST_ID"] = rid
request.request_id = rid # type: ignore[attr-defined]
if settings.ENABLE_REQUEST_LOGS:
try:
logging.info(
"request.start",
extra={
"request_id": rid,
"method": request.method,
"path": request.path,
"remote_addr": request.remote_addr,
},
)
except Exception:
pass
try:
g._start_time = time.time()
except Exception:
g._start_time = None # type: ignore[attr-defined]
@app.after_request
def add_request_id_header(response): # type: ignore[unused-ignore]
try:
rid = getattr(request, "request_id", None) or request.environ.get("HTTP_X_REQUEST_ID")
if rid:
response.headers["X-Request-Id"] = rid
if settings.ENABLE_REQUEST_LOGS:
try:
logging.info(
"request.end",
extra={
"request_id": rid,
"status": response.status_code,
"path": request.path,
},
)
except Exception:
pass
start_time = getattr(g, "_start_time", None)
observe_request(request.method, request.path, start_time, response.status_code)
except Exception:
pass
return response

server/rate_limit.py Normal file

@@ -0,0 +1,75 @@
"""Rate limiting helpers with optional Redis support."""
from __future__ import annotations
import logging
import os
import time
from collections import defaultdict, deque
from typing import DefaultDict, Deque
from . import settings
try:
import redis
except Exception: # redis is optional
redis = None # type: ignore
_rate_tracker: DefaultDict[str, Deque[float]] = defaultdict(deque)
def allow_request(client_ip: str) -> bool:
"""Return True when the client is allowed to make a request."""
if settings.RATE_LIMIT_MAX <= 0:
return True
if settings.REDIS_URL and redis is not None:
try:
client = redis.from_url(settings.REDIS_URL, decode_responses=True)
key = f"rl:{client_ip}"
lua = (
"local key=KEYS[1]\n"
"local now=tonumber(ARGV[1])\n"
"local window=tonumber(ARGV[2])\n"
"local limit=tonumber(ARGV[3])\n"
"local member=ARGV[4]\n"
"redis.call('ZADD', key, now, member)\n"
"redis.call('ZREMRANGEBYSCORE', key, 0, now - window)\n"
"local cnt = redis.call('ZCARD', key)\n"
"redis.call('EXPIRE', key, window)\n"
"if cnt > limit then return 0 end\n"
"return cnt\n"
)
now_ts = int(time.time() * 1000)
member = f"{now_ts}-{os.getpid()}-{int(time.time_ns() % 1000000)}"
result = client.eval(
lua,
1,
key,
str(now_ts),
str(settings.RATE_LIMIT_WINDOW * 1000),
str(settings.RATE_LIMIT_MAX),
member,
)
try:
count = int(str(result))
except Exception:
logging.exception("Unexpected Redis eval result: %r", result)
return False
return count != 0
except Exception as exc:
logging.exception("Redis rate limiter error, falling back to memory: %s", exc)
now = time.time()
bucket = _rate_tracker[client_ip]
while bucket and now - bucket[0] > settings.RATE_LIMIT_WINDOW:
bucket.popleft()
if len(bucket) >= settings.RATE_LIMIT_MAX:
return False
bucket.append(now)
if len(bucket) > settings.RATE_LIMIT_MAX * 2:
while len(bucket) > settings.RATE_LIMIT_MAX:
bucket.popleft()
return True

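Without REDIS_URL the limiter keeps a per-IP deque of timestamps and prunes anything older than the window, so within one window at most RATE_LIMIT_MAX calls pass. A sketch of the in-memory fallback, assuming the default RATE_LIMIT_MAX=10 / RATE_LIMIT_WINDOW=60 from settings.py and no Redis configured:

    from server.rate_limit import allow_request

    # The 11th call from the same client inside the window is rejected.
    results = [allow_request("203.0.113.7") for _ in range(11)]
    print(results.count(True), results[-1])  # 10 True, then False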
server/routes/__init__.py Normal file

@@ -0,0 +1,15 @@
"""Blueprint registration for the server application."""
from __future__ import annotations
from flask import Flask
from . import admin, auth, contact, monitoring, newsletter
def register_blueprints(app: Flask) -> None:
"""Register all HTTP blueprints with the Flask app."""
app.register_blueprint(contact.bp)
app.register_blueprint(newsletter.bp)
app.register_blueprint(monitoring.bp)
app.register_blueprint(auth.bp)
app.register_blueprint(admin.bp)

server/routes/admin.py Normal file

@@ -0,0 +1,377 @@
"""Admin routes for application management."""
from __future__ import annotations
from flask import Blueprint, render_template, jsonify, request
import logging
from .. import auth, settings
from ..database import delete_app_setting, get_app_settings, get_subscribers, update_app_setting
bp = Blueprint("admin", __name__, url_prefix="/admin")
@bp.route("/")
@auth.login_required
def dashboard():
"""Display admin dashboard overview."""
return render_template("admin_dashboard.html")
@bp.route("/newsletter")
@auth.login_required
def newsletter_subscribers():
"""Display newsletter subscriber management page."""
return render_template("admin_newsletter.html")
@bp.route("/newsletter/create")
@auth.login_required
def newsletter_create():
"""Display newsletter creation and sending page."""
return render_template("admin_newsletter_create.html")
@bp.route("/settings")
@auth.login_required
def settings_page():
"""Display current application settings."""
# Gather settings to display
app_settings = {
"Database": {
"DATABASE_URL": settings.DATABASE_URL or "sqlite:///./data/forms.db",
"POSTGRES_URL": settings.POSTGRES_URL or "Not configured",
"SQLite Path": str(settings.SQLITE_DB_PATH),
},
"SMTP": {
"Host": settings.SMTP_SETTINGS["host"] or "Not configured",
"Port": settings.SMTP_SETTINGS["port"],
"Username": settings.SMTP_SETTINGS["username"] or "Not configured",
"Sender": settings.SMTP_SETTINGS["sender"] or "Not configured",
"Recipients": ", ".join(settings.SMTP_SETTINGS["recipients"]) if settings.SMTP_SETTINGS["recipients"] else "Not configured",
"Use TLS": settings.SMTP_SETTINGS["use_tls"],
},
"Rate Limiting": {
"Max Requests": settings.RATE_LIMIT_MAX,
"Window (seconds)": settings.RATE_LIMIT_WINDOW,
"Redis URL": settings.REDIS_URL or "Not configured",
},
"Security": {
"Strict Origin Check": settings.STRICT_ORIGIN_CHECK,
"Allowed Origin": settings.ALLOWED_ORIGIN or "Not configured",
},
"Logging": {
"JSON Logs": settings.ENABLE_JSON_LOGS,
"Request Logs": settings.ENABLE_REQUEST_LOGS,
},
"Monitoring": {
"Sentry DSN": settings.SENTRY_DSN or "Not configured",
"Sentry Traces Sample Rate": settings.SENTRY_TRACES_SAMPLE_RATE,
},
"Admin": {
"Username": settings.ADMIN_USERNAME,
},
}
return render_template("admin_settings.html", settings=app_settings)
@bp.route("/submissions")
@auth.login_required
def submissions():
"""Display contact form submissions page."""
return render_template("admin_submissions.html")
@bp.route("/api/settings", methods=["GET"])
@auth.login_required
def get_settings_api():
"""Get all application settings via API."""
try:
settings_data = get_app_settings()
return jsonify({"status": "ok", "settings": settings_data})
except Exception as exc:
logging.exception("Failed to retrieve settings: %s", exc)
return jsonify({"status": "error", "message": "Failed to retrieve settings."}), 500
def validate_setting(key: str, value: str) -> str | None:
"""Validate a setting key-value pair. Returns error message or None if valid."""
# Define validation rules for known settings
validations = {
"maintenance_mode": lambda v: v in ["true", "false"],
"contact_form_enabled": lambda v: v in ["true", "false"],
"newsletter_enabled": lambda v: v in ["true", "false"],
"rate_limit_max": lambda v: v.isdigit() and 0 <= int(v) <= 1000,
"rate_limit_window": lambda v: v.isdigit() and 1 <= int(v) <= 3600,
}
if key in validations and not validations[key](value):
return f"Invalid value for {key}"
# General validation
if len(key) > 100:
return "Setting key too long (max 100 characters)"
if len(value) > 1000:
return "Setting value too long (max 1000 characters)"
return None
@bp.route("/api/settings/<key>", methods=["PUT"])
@auth.login_required
def update_setting_api(key: str):
"""Update a specific application setting via API."""
try:
data = request.get_json(silent=True) or {}
value = data.get("value", "").strip()
if not value:
return jsonify({"status": "error", "message": "Value is required."}), 400
# Validate the setting
validation_error = validate_setting(key, value)
if validation_error:
return jsonify({"status": "error", "message": validation_error}), 400
success = update_app_setting(key, value)
if success:
return jsonify({"status": "ok", "message": f"Setting '{key}' updated successfully."})
else:
return jsonify({"status": "error", "message": "Failed to update setting."}), 500
except Exception as exc:
logging.exception("Failed to update setting: %s", exc)
return jsonify({"status": "error", "message": "Failed to update setting."}), 500
@bp.route("/api/settings/<key>", methods=["DELETE"])
@auth.login_required
def delete_setting_api(key: str):
"""Delete a specific application setting via API."""
try:
deleted = delete_app_setting(key)
if deleted:
return jsonify({"status": "ok", "message": f"Setting '{key}' deleted successfully."})
else:
return jsonify({"status": "error", "message": f"Setting '{key}' not found."}), 404
except Exception as exc:
logging.exception("Failed to delete setting: %s", exc)
return jsonify({"status": "error", "message": "Failed to delete setting."}), 500
@bp.route("/api/newsletter", methods=["GET"])
@auth.login_required
def get_subscribers_api():
"""Retrieve newsletter subscribers with pagination, filtering, and sorting."""
try:
# Parse query parameters
page = int(request.args.get("page", 1))
per_page = min(int(request.args.get("per_page", 50)),
100) # Max 100 per page
sort_by = request.args.get("sort_by", "subscribed_at")
sort_order = request.args.get("sort_order", "desc")
email_filter = request.args.get("email")
# Validate sort_by
valid_sort_fields = ["email", "subscribed_at"]
if sort_by not in valid_sort_fields:
sort_by = "subscribed_at"
# Get subscribers
subscribers, total = get_subscribers(
page=page,
per_page=per_page,
sort_by=sort_by,
sort_order=sort_order,
email_filter=email_filter,
)
return jsonify({
"status": "ok",
"subscribers": subscribers,
"pagination": {
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
},
})
except Exception as exc:
logging.exception("Failed to retrieve subscribers: %s", exc)
return jsonify({"status": "error", "message": "Failed to retrieve subscribers."}), 500
@bp.route("/api/newsletters", methods=["POST"])
@auth.login_required
def create_newsletter_api():
"""Create a new newsletter."""
try:
data = request.get_json(silent=True) or {}
subject = data.get("subject", "").strip()
content = data.get("content", "").strip()
sender_name = data.get("sender_name", "").strip() or None
send_date = data.get("send_date", "").strip() or None
status = data.get("status", "draft")
if not subject or not content:
return jsonify({"status": "error", "message": "Subject and content are required."}), 400
if status not in ["draft", "scheduled", "sent"]:
return jsonify({"status": "error", "message": "Invalid status."}), 400
from ..database import save_newsletter
newsletter_id = save_newsletter(
subject, content, sender_name, send_date, status)
return jsonify({
"status": "ok",
"message": "Newsletter created successfully.",
"newsletter_id": newsletter_id
}), 201
except Exception as exc:
logging.exception("Failed to create newsletter: %s", exc)
return jsonify({"status": "error", "message": "Failed to create newsletter."}), 500
@bp.route("/api/newsletters", methods=["GET"])
@auth.login_required
def get_newsletters_api():
"""Retrieve newsletters with pagination and filtering."""
try:
page = int(request.args.get("page", 1))
per_page = min(int(request.args.get("per_page", 20)),
50) # Max 50 per page
status_filter = request.args.get("status")
from ..database import get_newsletters
newsletters, total = get_newsletters(
page=page, per_page=per_page, status_filter=status_filter)
return jsonify({
"status": "ok",
"newsletters": newsletters,
"pagination": {
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
},
})
except Exception as exc:
logging.exception("Failed to retrieve newsletters: %s", exc)
return jsonify({"status": "error", "message": "Failed to retrieve newsletters."}), 500
@bp.route("/api/newsletters/<int:newsletter_id>/send", methods=["POST"])
@auth.login_required
def send_newsletter_api(newsletter_id: int):
"""Send a newsletter to all subscribers."""
try:
from ..database import get_newsletter_by_id, update_newsletter_status, get_subscribers
from ..services.newsletter import send_newsletter_to_subscribers
from datetime import datetime, timezone
# Get the newsletter
newsletter = get_newsletter_by_id(newsletter_id)
if not newsletter:
return jsonify({"status": "error", "message": "Newsletter not found."}), 404
if newsletter["status"] == "sent":
return jsonify({"status": "error", "message": "Newsletter has already been sent."}), 400
# Get all subscribers
subscribers, _ = get_subscribers(
page=1, per_page=10000) # Get all subscribers
if not subscribers:
return jsonify({"status": "error", "message": "No subscribers found."}), 400
# Send the newsletter
success_count = send_newsletter_to_subscribers(
newsletter["subject"],
newsletter["content"],
[sub["email"] for sub in subscribers],
newsletter["sender_name"]
)
# Update newsletter status
sent_at = datetime.now(timezone.utc).isoformat()
update_newsletter_status(newsletter_id, "sent", sent_at)
return jsonify({
"status": "ok",
"message": f"Newsletter sent to {success_count} subscribers.",
"sent_count": success_count
})
except Exception as exc:
logging.exception("Failed to send newsletter: %s", exc)
return jsonify({"status": "error", "message": "Failed to send newsletter."}), 500
@bp.route("/api/contact", methods=["GET"])
@auth.login_required
def get_contact_submissions_api():
"""Retrieve contact form submissions with pagination, filtering, and sorting."""
try:
# Parse query parameters
page = int(request.args.get("page", 1))
per_page = min(int(request.args.get("per_page", 50)),
100) # Max 100 per page
sort_by = request.args.get("sort_by", "created_at")
sort_order = request.args.get("sort_order", "desc")
email_filter = request.args.get("email")
date_from = request.args.get("date_from")
date_to = request.args.get("date_to")
# Validate sort_by
valid_sort_fields = ["id", "name", "email", "created_at"]
if sort_by not in valid_sort_fields:
sort_by = "created_at"
# Get submissions
from ..database import get_contacts
submissions, total = get_contacts(
page=page,
per_page=per_page,
sort_by=sort_by,
sort_order=sort_order,
email_filter=email_filter,
date_from=date_from,
date_to=date_to,
)
return jsonify({
"status": "ok",
"submissions": submissions,
"pagination": {
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
},
})
except Exception as exc:
logging.exception("Failed to retrieve contact submissions: %s", exc)
return jsonify({"status": "error", "message": "Failed to retrieve contact submissions."}), 500
@bp.route("/api/contact/<int:contact_id>", methods=["DELETE"])
@auth.login_required
def delete_contact_submission_api(contact_id: int):
"""Delete a contact submission by ID."""
try:
from ..database import delete_contact
deleted = delete_contact(contact_id)
if deleted:
return jsonify({"status": "ok", "message": f"Contact submission {contact_id} deleted successfully."})
else:
return jsonify({"status": "error", "message": f"Contact submission {contact_id} not found."}), 404
except Exception as exc:
logging.exception("Failed to delete contact submission: %s", exc)
return jsonify({"status": "error", "message": "Failed to delete contact submission."}), 500

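The admin JSON endpoints all sit behind login_required, so exercising them needs a logged-in session first. A test-client sketch, assuming the default admin/admin credentials from settings.py:

    from server.factory import create_app

    app = create_app()
    client = app.test_client()

    # Log in via the auth blueprint, then call the admin API with the same session cookie.
    client.post("/auth/login", data={"username": "admin", "password": "admin"})
    resp = client.put("/admin/api/settings/maintenance_mode", json={"value": "false"})
    print(resp.status_code, resp.get_json())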
server/routes/auth.py Normal file

@@ -0,0 +1,31 @@
"""Authentication routes for admin access."""
from __future__ import annotations
from flask import Blueprint, flash, redirect, render_template, request, session, url_for
from .. import settings
bp = Blueprint("auth", __name__, url_prefix="/auth")
@bp.route("/login", methods=["GET", "POST"])
def login():
"""Handle user login."""
if request.method == "POST":
username = request.form.get("username")
password = request.form.get("password")
if username == settings.ADMIN_USERNAME and password == settings.ADMIN_PASSWORD:
session["logged_in"] = True
return redirect("/admin/")
else:
flash("Invalid credentials")
return render_template("login.html")
@bp.route("/logout")
def logout():
"""Handle user logout."""
session.pop("logged_in", None)
return redirect("/auth/login")

server/routes/contact.py Normal file

@@ -0,0 +1,134 @@
"""Contact submission routes."""
from __future__ import annotations
import logging
from flask import Blueprint, jsonify, request
from .. import auth, settings
from ..database import delete_contact, get_contacts
from ..rate_limit import allow_request
from ..services.contact import persist_submission, send_notification, validate_submission
bp = Blueprint("contact", __name__, url_prefix="/api")
@bp.route("/contact", methods=["POST"])
def receive_contact():
payload = request.form or request.get_json(silent=True) or {}
if settings.STRICT_ORIGIN_CHECK:
origin = request.headers.get("Origin")
referer = request.headers.get("Referer")
allowed = settings.ALLOWED_ORIGIN
if allowed:
if origin and origin != allowed and not (referer and referer.startswith(allowed)):
logging.warning(
"Origin/Referer mismatch (origin=%s, referer=%s)", origin, referer)
return jsonify({"status": "error", "message": "Invalid request origin."}), 403
else:
logging.warning(
"STRICT_ORIGIN_CHECK enabled but ALLOWED_ORIGIN not set; skipping enforcement")
client_ip_source = request.headers.get(
"X-Forwarded-For", request.remote_addr or "unknown")
client_ip = client_ip_source.split(
",")[0].strip() if client_ip_source else "unknown"
if not allow_request(client_ip):
logging.warning("Rate limit reached for %s", client_ip)
return (
jsonify(
{"status": "error", "message": "Too many submissions, please try later."}),
429,
)
submission, errors = validate_submission(payload)
if errors:
return jsonify({"status": "error", "errors": errors}), 400
assert submission is not None
try:
record_id = persist_submission(submission)
except Exception as exc: # pragma: no cover - logged for diagnostics
logging.exception("Failed to persist submission: %s", exc)
return (
jsonify({"status": "error", "message": "Could not store submission."}),
500,
)
email_sent = send_notification(submission)
status = 201 if email_sent else 202
body = {
"status": "ok",
"id": record_id,
"email": "sent" if email_sent else "pending",
}
if not email_sent:
body["message"] = "Submission stored but email dispatch is not configured."
return jsonify(body), status
@bp.route("/contact", methods=["GET"])
@auth.login_required
def get_submissions():
"""Retrieve contact form submissions with pagination, filtering, and sorting."""
try:
# Parse query parameters
page = int(request.args.get("page", 1))
per_page = min(int(request.args.get("per_page", 50)), 100) # Max 100 per page
sort_by = request.args.get("sort_by", "created_at")
sort_order = request.args.get("sort_order", "desc")
email_filter = request.args.get("email")
date_from = request.args.get("date_from")
date_to = request.args.get("date_to")
# Validate sort_by
valid_sort_fields = ["id", "name", "email", "created_at"]
if sort_by not in valid_sort_fields:
sort_by = "created_at"
# Get submissions
submissions, total = get_contacts(
page=page,
per_page=per_page,
sort_by=sort_by,
sort_order=sort_order,
email_filter=email_filter,
date_from=date_from,
date_to=date_to,
)
return jsonify({
"status": "ok",
"submissions": submissions,
"pagination": {
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
},
})
except Exception as exc:
logging.exception("Failed to retrieve submissions: %s", exc)
return jsonify({"status": "error", "message": "Failed to retrieve submissions."}), 500
@bp.route("/contact/<int:contact_id>", methods=["DELETE"])
@auth.login_required
def delete_submission(contact_id: int):
"""Delete a contact submission by ID."""
try:
deleted = delete_contact(contact_id)
if not deleted:
return jsonify({"status": "error", "message": "Submission not found."}), 404
return jsonify({"status": "ok", "message": "Submission deleted successfully."})
except Exception as exc:
logging.exception("Failed to delete submission: %s", exc)
return jsonify({"status": "error", "message": "Failed to delete submission."}), 500

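The POST handler validates, rate-limits and persists the submission, then attempts the notification email; with SMTP unconfigured it still stores the record and answers 202 with the email marked "pending". A test-client sketch:

    from server.factory import create_app

    client = create_app().test_client()
    resp = client.post("/api/contact", json={
        "name": "Ada",
        "email": "ada@example.org",
        "message": "Hello",
        "consent": True,
    })
    # 201 when a notification email was sent, 202 when SMTP is not configured.
    print(resp.status_code, resp.get_json())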
server/routes/monitoring.py Normal file

@@ -0,0 +1,33 @@
"""Operational monitoring routes."""
from __future__ import annotations
import logging
from flask import Blueprint, jsonify
from ..database import db_cursor
from ..metrics import export_metrics
bp = Blueprint("monitoring", __name__)
@bp.route("/health", methods=["GET"])
def health():
"""Simple health endpoint used by orchestrators and Docker HEALTHCHECK."""
try:
with db_cursor(read_only=True) as (_, cur):
cur.execute("SELECT 1")
cur.fetchone()
except Exception as exc: # pragma: no cover - logged for operators
logging.exception("Health check DB failure: %s", exc)
return jsonify({"status": "unhealthy"}), 500
return jsonify({"status": "ok"}), 200
@bp.route("/metrics", methods=["GET"])
def metrics():
payload, status, headers = export_metrics()
if isinstance(payload, dict):
return jsonify(payload), status
return payload, status, headers

server/routes/newsletter.py Normal file

@@ -0,0 +1,133 @@
"""Newsletter subscription routes."""
from __future__ import annotations
import logging
from flask import Blueprint, jsonify, request, render_template
from ..services import newsletter
bp = Blueprint("newsletter", __name__, url_prefix="/api")
@bp.route("/newsletter", methods=["POST"])
def subscribe():
payload = request.form or request.get_json(silent=True) or {}
email = (payload.get("email") or "").strip()
if not newsletter.validate_email(email):
return jsonify({"status": "error", "message": "Valid email is required."}), 400
try:
created = newsletter.subscribe(email)
except Exception as exc: # pragma: no cover - errors are logged
logging.exception("Failed to persist subscriber: %s", exc)
return jsonify({"status": "error", "message": "Could not store subscription."}), 500
if not created:
logging.info("Newsletter subscription ignored (duplicate): %s", email)
return jsonify({"status": "error", "message": "Email is already subscribed."}), 409
logging.info("New newsletter subscription: %s", email)
return jsonify({"status": "ok", "message": "Subscribed successfully."}), 201
@bp.route("/newsletter", methods=["DELETE"])
def unsubscribe():
payload = request.form or request.get_json(silent=True) or {}
email = (payload.get("email") or "").strip()
if not newsletter.validate_email(email):
return jsonify({"status": "error", "message": "Valid email is required."}), 400
try:
deleted = newsletter.unsubscribe(email)
except Exception as exc: # pragma: no cover - errors are logged
logging.exception("Failed to remove subscriber: %s", exc)
return jsonify({"status": "error", "message": "Could not remove subscription."}), 500
if not deleted:
logging.info(
"Newsletter unsubscription ignored (not subscribed): %s", email)
return jsonify({"status": "error", "message": "Email is not subscribed."}), 404
logging.info("Newsletter unsubscription: %s", email)
return jsonify({"status": "ok", "message": "Unsubscribed successfully."}), 200
@bp.route("/newsletter", methods=["PUT"])
def update_subscription():
payload = request.form or request.get_json(silent=True) or {}
old_email = (payload.get("old_email") or "").strip()
new_email = (payload.get("new_email") or "").strip()
if not newsletter.validate_email(old_email) or not newsletter.validate_email(new_email):
return jsonify({"status": "error", "message": "Valid old and new emails are required."}), 400
try:
updated = newsletter.update_email(old_email, new_email)
except Exception as exc: # pragma: no cover - errors are logged
logging.exception("Failed to update subscriber: %s", exc)
return jsonify({"status": "error", "message": "Could not update subscription."}), 500
if not updated:
return jsonify({"status": "error", "message": "Old email not found or new email already exists."}), 404
logging.info("Newsletter subscription updated: %s -> %s",
old_email, new_email)
return jsonify({"status": "ok", "message": "Subscription updated successfully."}), 200
@bp.route("/newsletter/manage", methods=["GET", "POST"])
def manage_subscription():
"""Display newsletter subscription management page."""
message = None
message_type = None
if request.method == "POST":
action = request.form.get("action")
email = (request.form.get("email") or "").strip()
if not newsletter.validate_email(email):
message = "Please enter a valid email address."
message_type = "error"
else:
try:
if action == "subscribe":
created = newsletter.subscribe(email)
if created:
message = "Successfully subscribed to newsletter!"
message_type = "success"
else:
message = "This email is already subscribed."
message_type = "info"
elif action == "unsubscribe":
deleted = newsletter.unsubscribe(email)
if deleted:
message = "Successfully unsubscribed from newsletter."
message_type = "success"
else:
message = "This email is not currently subscribed."
message_type = "info"
elif action == "update":
old_email = (request.form.get("old_email") or "").strip()
if not newsletter.validate_email(old_email):
message = "Please enter a valid current email address."
message_type = "error"
elif old_email == email:
message = "New email must be different from current email."
message_type = "error"
else:
updated = newsletter.update_email(old_email, email)
if updated:
message = "Email address updated successfully!"
message_type = "success"
else:
message = "Current email not found or new email already exists."
message_type = "error"
except Exception as exc:
logging.exception("Failed to manage subscription: %s", exc)
message = "An error occurred. Please try again."
message_type = "error"
return render_template("newsletter_manage.html", message=message, message_type=message_type)

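The same /api/newsletter path handles subscribe (POST), unsubscribe (DELETE) and address change (PUT). A quick subscribe/duplicate/unsubscribe sketch against a fresh database:

    from server.factory import create_app

    client = create_app().test_client()
    print(client.post("/api/newsletter", json={"email": "ada@example.org"}).status_code)    # 201 first subscribe
    print(client.post("/api/newsletter", json={"email": "ada@example.org"}).status_code)    # 409 duplicate
    print(client.delete("/api/newsletter", json={"email": "ada@example.org"}).status_code)  # 200 unsubscribe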
server/services/__init__.py Normal file

@@ -0,0 +1 @@
"""Service layer namespace."""

server/services/contact.py Normal file

@@ -0,0 +1,112 @@
"""Business logic for contact submissions."""
from __future__ import annotations
import logging
import smtplib
from dataclasses import dataclass, field
from datetime import datetime, timezone
from email.message import EmailMessage
from typing import Any, Dict, Tuple
from .. import settings
from ..database import save_contact
from ..metrics import record_submission
from ..utils import is_valid_email
@dataclass
class ContactSubmission:
name: str
email: str
company: str | None
message: str
timeline: str | None
# default_factory so each submission gets its own timestamp; a plain default would be evaluated once at import time
created_at: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
def validate_submission(raw: Dict[str, Any]) -> Tuple[ContactSubmission | None, Dict[str, str]]:
"""Validate the incoming payload and return a submission object."""
name = (raw.get("name") or "").strip()
email = (raw.get("email") or "").strip()
message = (raw.get("message") or "").strip()
consent = raw.get("consent")
company = (raw.get("company") or "").strip()
errors: Dict[str, str] = {}
if not name:
errors["name"] = "Name is required."
elif len(name) > 200:
errors["name"] = "Name is too long (max 200 chars)."
if not is_valid_email(email):
errors["email"] = "Valid email is required."
if not message:
errors["message"] = "Message is required."
elif len(message) > 5000:
errors["message"] = "Message is too long (max 5000 chars)."
if not consent:
errors["consent"] = "Consent is required."
if company and len(company) > 200:
errors["company"] = "Organisation name is too long (max 200 chars)."
if errors:
return None, errors
submission = ContactSubmission(
name=name,
email=email,
company=company or None,
message=message,
timeline=(raw.get("timeline") or "").strip() or None,
)
return submission, {}
def persist_submission(submission: ContactSubmission) -> int:
"""Persist the submission and update metrics."""
record_id = save_contact(submission)
record_submission()
return record_id
def send_notification(submission: ContactSubmission) -> bool:
"""Send an email notification for the submission if SMTP is configured."""
if not settings.SMTP_SETTINGS["host"] or not settings.SMTP_SETTINGS["recipients"]:
logging.info("SMTP not configured; skipping email notification")
return False
sender = settings.SMTP_SETTINGS["sender"] or "no-reply@example.com"
recipients = settings.SMTP_SETTINGS["recipients"]
msg = EmailMessage()
msg["Subject"] = f"Neue Kontaktanfrage von {submission.name}"
msg["From"] = sender
msg["To"] = ", ".join(recipients)
msg.set_content(
"\n".join(
[
f"Name: {submission.name}",
f"E-Mail: {submission.email}",
f"Organisation: {submission.company or ''}",
f"Zeithorizont: {submission.timeline or ''}",
"",
"Nachricht:",
submission.message,
"",
f"Eingang: {submission.created_at}",
]
)
)
try:
with smtplib.SMTP(settings.SMTP_SETTINGS["host"], settings.SMTP_SETTINGS["port"], timeout=15) as server:
if settings.SMTP_SETTINGS["use_tls"]:
server.starttls()
if settings.SMTP_SETTINGS["username"]:
server.login(
settings.SMTP_SETTINGS["username"], settings.SMTP_SETTINGS["password"] or "")
server.send_message(msg)
logging.info("Notification email dispatched to %s", recipients)
return True
except Exception as exc: # pragma: no cover - SMTP failures are logged only
logging.error("Failed to send notification email: %s", exc)
return False

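validate_submission returns either a ContactSubmission plus an empty dict, or None plus a field-keyed error dict, so callers can branch on the errors alone. A sketch:

    from server.services.contact import validate_submission

    submission, errors = validate_submission({
        "name": "Ada", "email": "ada@example.org",
        "message": "Hello", "consent": "yes",
    })
    print(errors)          # {} -> submission is a ContactSubmission

    submission, errors = validate_submission({"email": "not-an-email"})
    print(sorted(errors))  # ['consent', 'email', 'message', 'name']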
server/services/newsletter.py Normal file

@@ -0,0 +1,96 @@
"""Business logic for newsletter subscriptions."""
from __future__ import annotations
from datetime import datetime, timezone
from ..database import save_subscriber, delete_subscriber, update_subscriber
from ..utils import is_valid_email
def validate_email(email: str) -> bool:
"""Return True when the provided email passes a basic sanity check."""
return is_valid_email(email)
def subscribe(email: str) -> bool:
"""Persist the subscription and return False when it already exists."""
created_at = datetime.now(timezone.utc).isoformat()
return save_subscriber(email, created_at=created_at)
def unsubscribe(email: str) -> bool:
"""Remove the subscription and return True if it existed."""
return delete_subscriber(email)
def update_email(old_email: str, new_email: str) -> bool:
"""Update the email for a subscription. Return True if updated."""
return update_subscriber(old_email, new_email)
def send_newsletter_to_subscribers(subject: str, content: str, emails: list[str], sender_name: str | None = None) -> int:
"""Send newsletter to list of email addresses. Returns count of successful sends."""
import logging
from .. import settings
if not settings.SMTP_SETTINGS["host"]:
logging.error("SMTP not configured, cannot send newsletter")
return 0
try:
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
# Create message
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = settings.SMTP_SETTINGS["sender"] or "noreply@example.com"
# Format content
formatted_content = content.replace('\n', '<br>')
html_content = f"""
<html>
<body>
{formatted_content}
</body>
</html>
"""
# Add HTML content
html_part = MIMEText(html_content, 'html')
msg.attach(html_part)
# Send to each recipient individually for better deliverability
success_count = 0
with smtplib.SMTP(settings.SMTP_SETTINGS["host"], settings.SMTP_SETTINGS["port"]) as server:
if settings.SMTP_SETTINGS["use_tls"]:
server.starttls()
if settings.SMTP_SETTINGS["username"] and settings.SMTP_SETTINGS["password"]:
server.login(
settings.SMTP_SETTINGS["username"], settings.SMTP_SETTINGS["password"])
for email in emails:
try:
# Create a fresh copy for each recipient
recipient_msg = MIMEMultipart('alternative')
recipient_msg['Subject'] = subject
recipient_msg['From'] = msg['From']
recipient_msg['To'] = email
# Add HTML content
recipient_msg.attach(MIMEText(html_content, 'html'))
server.sendmail(msg['From'], email,
recipient_msg.as_string())
success_count += 1
except Exception as exc:
logging.exception(
"Failed to send newsletter to %s: %s", email, exc)
return success_count
except Exception as exc:
logging.exception("Failed to send newsletter: %s", exc)
return 0

server/settings.py Normal file

@@ -0,0 +1,65 @@
"""Environment driven configuration values."""
from __future__ import annotations
import os
import re
from pathlib import Path
from dotenv import load_dotenv
from .utils import normalize_recipients
load_dotenv()
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = os.getenv("FLASK_SECRET_KEY", "dev")
SENTRY_DSN = os.getenv("SENTRY_DSN")
SENTRY_TRACES_SAMPLE_RATE = float(
os.getenv("SENTRY_TRACES_SAMPLE_RATE", "0.0"))
ENABLE_REQUEST_LOGS = os.getenv("ENABLE_REQUEST_LOGS", "true").lower() in {
"1", "true", "yes"}
ENABLE_JSON_LOGS = os.getenv("ENABLE_JSON_LOGS", "false").lower() in {
"1", "true", "yes"}
DATABASE_URL = os.getenv("DATABASE_URL")
POSTGRES_URL = os.getenv("POSTGRES_URL")
def resolve_sqlite_path() -> Path:
"""Resolve the configured SQLite path honoring DATABASE_URL."""
if DATABASE_URL:
if DATABASE_URL.startswith("sqlite:"):
match = re.match(r"sqlite:(?:////?|)(.+)", DATABASE_URL)
if match:
return Path(match.group(1))
return Path("data/forms.db")
return Path(DATABASE_URL)
return BASE_DIR / "data" / "forms.db"
SQLITE_DB_PATH = resolve_sqlite_path()
RATE_LIMIT_MAX = int(os.getenv("RATE_LIMIT_MAX", "10"))
RATE_LIMIT_WINDOW = int(os.getenv("RATE_LIMIT_WINDOW", "60"))
REDIS_URL = os.getenv("REDIS_URL")
STRICT_ORIGIN_CHECK = os.getenv("STRICT_ORIGIN_CHECK", "false").lower() in {
"1", "true", "yes"}
ALLOWED_ORIGIN = os.getenv("ALLOWED_ORIGIN")
SMTP_SETTINGS = {
"host": os.getenv("SMTP_HOST"),
"port": int(os.getenv("SMTP_PORT", "587")),
"username": os.getenv("SMTP_USERNAME"),
"password": os.getenv("SMTP_PASSWORD"),
"sender": os.getenv("SMTP_SENDER"),
"use_tls": os.getenv("SMTP_USE_TLS", "true").lower() in {"1", "true", "yes"},
"recipients": normalize_recipients(os.getenv("SMTP_RECIPIENTS")),
}
if not SMTP_SETTINGS["sender"] and SMTP_SETTINGS["username"]:
SMTP_SETTINGS["sender"] = SMTP_SETTINGS["username"]
ADMIN_USERNAME = os.getenv("ADMIN_USERNAME", "admin")
ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "admin")

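resolve_sqlite_path accepts sqlite: URLs as well as bare paths via DATABASE_URL, stripping the sqlite:/// prefix with the regex above (note the four-slash absolute form appears to lose its leading slash to that regex, so the relative three-slash form is the safe input). Because the value is read at import time, the variable must be set before the package is imported. A sketch with a hypothetical path:

    import os
    os.environ["DATABASE_URL"] = "sqlite:///./data/forms.db"  # hypothetical relative-path URL

    from server import settings  # must be imported after the variable is set
    print(settings.SQLITE_DB_PATH)  # data/forms.db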
server/utils.py Normal file

@@ -0,0 +1,23 @@
"""Common utility helpers for the server package."""
from __future__ import annotations
import uuid
from typing import List
def normalize_recipients(value: str | None) -> List[str]:
"""Split a comma separated string of emails into a clean list."""
if not value:
return []
return [item.strip() for item in value.split(",") if item.strip()]
def is_valid_email(value: str) -> bool:
"""Perform a very small sanity check for email addresses."""
value = value.strip()
return bool(value and "@" in value)
def generate_request_id() -> str:
"""Return a UUID4 string for request correlation."""
return str(uuid.uuid4())
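
These helpers are deliberately small: normalize_recipients tolerates blank entries and is_valid_email only checks for an "@". A quick sketch of their behavior:

    from server.utils import generate_request_id, is_valid_email, normalize_recipients

    print(normalize_recipients("a@example.org, ,b@example.org"))  # ['a@example.org', 'b@example.org']
    print(is_valid_email("not-an-email"), is_valid_email("a@example.org"))  # False True
    print(len(generate_request_id()))  # 36-character UUID4 string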