Refactor test cases for improved readability and consistency
- Updated test functions in various test files to enhance code clarity by formatting long lines and improving indentation.
- Adjusted assertions to use multi-line formatting for better readability (a short sketch of this style follows below).
- Added new test cases for the theme settings API to ensure proper functionality.
- Ensured consistent use of line breaks and spacing across test files for uniformity.
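As a rough sketch of the multi-line assertion style and the kind of theme settings API test described above (the endpoint path, the `client` fixture, and the payload fields are illustrative assumptions, not names taken from this repository):

# Hypothetical pytest-style sketch; endpoint, fixture, and field names are assumed.
def test_update_theme_settings_round_trip(client):
    # Send a settings update through an assumed test client fixture.
    response = client.put(
        "/api/settings/theme",
        json={"mode": "dark", "accent_color": "#0055ff"},
    )

    # Multi-line assertions keep the expected payload readable.
    assert response.status_code == 200
    assert response.json() == {
        "mode": "dark",
        "accent_color": "#0055ff",
    }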
@@ -39,6 +39,7 @@ from psycopg2 import extensions
from psycopg2.extensions import connection as PGConnection, parse_dsn
from dotenv import load_dotenv
from sqlalchemy import create_engine, inspect

ROOT_DIR = Path(__file__).resolve().parents[1]
if str(ROOT_DIR) not in sys.path:
    sys.path.insert(0, str(ROOT_DIR))
@@ -125,8 +126,7 @@ class DatabaseConfig:
        ]
        if missing:
            raise RuntimeError(
                "Missing required database configuration: " +
                ", ".join(missing)
                "Missing required database configuration: " + ", ".join(missing)
            )

        host = cast(str, host)
@@ -208,12 +208,17 @@ class DatabaseConfig:
class DatabaseSetup:
    """Encapsulates the full setup workflow."""

    def __init__(self, config: DatabaseConfig, *, dry_run: bool = False) -> None:
    def __init__(
        self, config: DatabaseConfig, *, dry_run: bool = False
    ) -> None:
        self.config = config
        self.dry_run = dry_run
        self._models_loaded = False
        self._rollback_actions: list[tuple[str, Callable[[], None]]] = []
    def _register_rollback(self, label: str, action: Callable[[], None]) -> None:

    def _register_rollback(
        self, label: str, action: Callable[[], None]
    ) -> None:
        if self.dry_run:
            return
        self._rollback_actions.append((label, action))
@@ -237,7 +242,6 @@ class DatabaseSetup:
    def clear_rollbacks(self) -> None:
        self._rollback_actions.clear()


    def _describe_connection(self, user: str, database: str) -> str:
        return f"{user}@{self.config.host}:{self.config.port}/{database}"

@@ -384,9 +388,9 @@ class DatabaseSetup:
        try:
            if self.config.password:
                cursor.execute(
                    sql.SQL("CREATE ROLE {} WITH LOGIN PASSWORD %s").format(
                        sql.Identifier(self.config.user)
                    ),
                    sql.SQL(
                        "CREATE ROLE {} WITH LOGIN PASSWORD %s"
                    ).format(sql.Identifier(self.config.user)),
                    (self.config.password,),
                )
            else:
@@ -589,8 +593,7 @@ class DatabaseSetup:
            return psycopg2.connect(dsn)
        except psycopg2.Error as exc:
            raise RuntimeError(
                "Unable to establish admin connection. "
                f"Target: {descriptor}"
                "Unable to establish admin connection. " f"Target: {descriptor}"
            ) from exc

    def _application_connection(self) -> PGConnection:
@@ -645,7 +648,9 @@ class DatabaseSetup:
            importlib.import_module(f"{package.__name__}.{module_info.name}")
        self._models_loaded = True

    def run_migrations(self, migrations_dir: Optional[Path | str] = None) -> None:
    def run_migrations(
        self, migrations_dir: Optional[Path | str] = None
    ) -> None:
        """Execute pending SQL migrations in chronological order."""

        directory = (
@@ -673,7 +678,8 @@ class DatabaseSetup:
        conn.autocommit = True
        with conn.cursor() as cursor:
            table_exists = self._migrations_table_exists(
                cursor, schema_name)
                cursor, schema_name
            )
            if not table_exists:
                if self.dry_run:
                    logger.info(
@@ -692,12 +698,10 @@ class DatabaseSetup:
                applied = set()
            else:
                applied = self._fetch_applied_migrations(
                    cursor, schema_name)
                    cursor, schema_name
                )

            if (
                baseline_path.exists()
                and baseline_name not in applied
            ):
            if baseline_path.exists() and baseline_name not in applied:
                if self.dry_run:
                    logger.info(
                        "Dry run: baseline migration '%s' pending; would apply and mark legacy files",
@@ -756,9 +760,7 @@ class DatabaseSetup:
            )

            pending = [
                path
                for path in migration_files
                if path.name not in applied
                path for path in migration_files if path.name not in applied
            ]

            if not pending:
@@ -792,9 +794,7 @@ class DatabaseSetup:
        cursor.execute(
            sql.SQL(
                "INSERT INTO {} (filename, applied_at) VALUES (%s, NOW())"
            ).format(
                sql.Identifier(schema_name, MIGRATIONS_TABLE)
            ),
            ).format(sql.Identifier(schema_name, MIGRATIONS_TABLE)),
            (path.name,),
        )
        return path.name
@@ -820,9 +820,7 @@ class DatabaseSetup:
                "filename TEXT PRIMARY KEY,"
                "applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()"
                ")"
            ).format(
                sql.Identifier(schema_name, MIGRATIONS_TABLE)
            )
            ).format(sql.Identifier(schema_name, MIGRATIONS_TABLE))
        )

    def _fetch_applied_migrations(self, cursor, schema_name: str) -> set[str]:
@@ -974,7 +972,7 @@ class DatabaseSetup:
                (database,),
            )
            cursor.execute(
                sql.SQL("DROP DATABASE IF EXISTS {}" ).format(
                sql.SQL("DROP DATABASE IF EXISTS {}").format(
                    sql.Identifier(database)
                )
            )
@@ -985,7 +983,7 @@ class DatabaseSetup:
        conn.autocommit = True
        with conn.cursor() as cursor:
            cursor.execute(
                sql.SQL("DROP ROLE IF EXISTS {}" ).format(
                sql.SQL("DROP ROLE IF EXISTS {}").format(
                    sql.Identifier(role)
                )
            )
@@ -1000,27 +998,35 @@ class DatabaseSetup:
        conn.autocommit = True
        with conn.cursor() as cursor:
            cursor.execute(
                sql.SQL("REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} FROM {}" ).format(
                sql.SQL(
                    "REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA {} FROM {}"
                ).format(
                    sql.Identifier(schema_name),
                    sql.Identifier(self.config.user)
                    sql.Identifier(self.config.user),
                )
            )
            cursor.execute(
                sql.SQL("REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} FROM {}" ).format(
                sql.SQL(
                    "REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {} FROM {}"
                ).format(
                    sql.Identifier(schema_name),
                    sql.Identifier(self.config.user)
                    sql.Identifier(self.config.user),
                )
            )
            cursor.execute(
                sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM {}" ).format(
                sql.SQL(
                    "ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM {}"
                ).format(
                    sql.Identifier(schema_name),
                    sql.Identifier(self.config.user)
                    sql.Identifier(self.config.user),
                )
            )
            cursor.execute(
                sql.SQL("ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE USAGE, SELECT ON SEQUENCES FROM {}" ).format(
                sql.SQL(
                    "ALTER DEFAULT PRIVILEGES IN SCHEMA {} REVOKE USAGE, SELECT ON SEQUENCES FROM {}"
                ).format(
                    sql.Identifier(schema_name),
                    sql.Identifier(self.config.user)
                    sql.Identifier(self.config.user),
                )
            )

@@ -1064,19 +1070,18 @@ def parse_args() -> argparse.Namespace:
    )
    parser.add_argument("--db-driver", help="Override DATABASE_DRIVER")
    parser.add_argument("--db-host", help="Override DATABASE_HOST")
    parser.add_argument("--db-port", type=int,
                        help="Override DATABASE_PORT")
    parser.add_argument("--db-port", type=int, help="Override DATABASE_PORT")
    parser.add_argument("--db-name", help="Override DATABASE_NAME")
    parser.add_argument("--db-user", help="Override DATABASE_USER")
    parser.add_argument(
        "--db-password", help="Override DATABASE_PASSWORD")
    parser.add_argument("--db-password", help="Override DATABASE_PASSWORD")
    parser.add_argument("--db-schema", help="Override DATABASE_SCHEMA")
    parser.add_argument(
        "--admin-url",
        help="Override DATABASE_ADMIN_URL for administrative operations",
    )
    parser.add_argument(
        "--admin-user", help="Override DATABASE_SUPERUSER for admin ops")
        "--admin-user", help="Override DATABASE_SUPERUSER for admin ops"
    )
    parser.add_argument(
        "--admin-password",
        help="Override DATABASE_SUPERUSER_PASSWORD for admin ops",
@@ -1091,7 +1096,11 @@ def parse_args() -> argparse.Namespace:
        help="Log actions without applying changes.",
    )
    parser.add_argument(
        "--verbose", "-v", action="count", default=0, help="Increase logging verbosity"
        "--verbose",
        "-v",
        action="count",
        default=0,
        help="Increase logging verbosity",
    )
    return parser.parse_args()

@@ -1099,8 +1108,9 @@ def parse_args() -> argparse.Namespace:
def main() -> None:
    args = parse_args()
    level = logging.WARNING - (10 * min(args.verbose, 2))
    logging.basicConfig(level=max(level, logging.INFO),
                        format="%(levelname)s %(message)s")
    logging.basicConfig(
        level=max(level, logging.INFO), format="%(levelname)s %(message)s"
    )

    override_args: dict[str, Optional[str]] = {
        "DATABASE_DRIVER": args.db_driver,
@@ -1120,7 +1130,9 @@ def main() -> None:
    config = DatabaseConfig.from_env(overrides=override_args)
    setup = DatabaseSetup(config, dry_run=args.dry_run)

    admin_tasks_requested = args.ensure_database or args.ensure_role or args.ensure_schema
    admin_tasks_requested = (
        args.ensure_database or args.ensure_role or args.ensure_schema
    )
    if admin_tasks_requested:
        setup.validate_admin_connection()

@@ -1145,9 +1157,7 @@ def main() -> None:
    auto_run_migrations_reason: Optional[str] = None
    if args.seed_data and not should_run_migrations:
        should_run_migrations = True
        auto_run_migrations_reason = (
            "Seed data requested without explicit --run-migrations; applying migrations first."
        )
        auto_run_migrations_reason = "Seed data requested without explicit --run-migrations; applying migrations first."

    try:
        if args.ensure_database:
@@ -1167,9 +1177,7 @@ def main() -> None:
        if auto_run_migrations_reason:
            logger.info(auto_run_migrations_reason)
        migrations_path = (
            Path(args.migrations_dir)
            if args.migrations_dir
            else None
            Path(args.migrations_dir) if args.migrations_dir else None
        )
        setup.run_migrations(migrations_path)
        if args.seed_data: