feat: Initialize frontend and backend structure with essential configurations
- Added TypeScript build info for frontend.
- Created Vite configuration for React application.
- Implemented pre-commit hook to run checks before commits.
- Set up PostgreSQL Dockerfile with PostGIS support and initialization scripts.
- Added database creation script for PostgreSQL with necessary extensions.
- Established Python project configuration with dependencies and development tools.
- Developed pre-commit script to enforce code quality checks for backend and frontend.
- Created PowerShell script to set up Git hooks path.
This commit is contained in:
69
backend/migrations/env.py
Normal file
69
backend/migrations/env.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from logging.config import fileConfig
|
||||
from typing import Any, Dict
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from backend.app.core.config import get_settings
|
||||
from backend.app.db.models import Base
|
||||
|
||||
# Alembic Config object; exposes values from the .ini file in use.
config = context.config

# Configure Python logging from the ini file when one was supplied.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata target that 'autogenerate' diffs the database against.
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def get_run_options() -> Dict[str, Any]:
    """Collect run options shared by offline and online migration runs.

    Returns a mapping containing the database URL from application settings.
    """
    return {"url": get_settings().database_url}
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Render migration SQL as a script without a live DB connection ('offline' mode)."""
    configure_opts = {
        "url": get_run_options()["url"],
        "target_metadata": target_metadata,
        "literal_binds": True,
        "compare_type": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**configure_opts)

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Apply migrations over a live database connection ('online' mode)."""
    ini_section = config.get_section(config.config_ini_section, {})
    # The settings-derived URL overrides whatever the ini section carries.
    connectable = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
        url=get_run_options()["url"],
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Dispatch on the mode Alembic configured before loading this script.
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_runner()
|
||||
19
backend/migrations/script.py.mako
Normal file
19
backend/migrations/script.py.mako
Normal file
@@ -0,0 +1,19 @@
|
||||
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from __future__ import annotations

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
|
||||
108
backend/migrations/versions/20251011_01_initial_schema.py
Normal file
108
backend/migrations/versions/20251011_01_initial_schema.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""Initial PostgreSQL/PostGIS schema"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from geoalchemy2.types import Geometry
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# Revision identifiers consumed by Alembic's version graph.
revision, down_revision = "20251011_01", None
branch_labels, depends_on = None, None
|
||||
|
||||
|
||||
def _uuid_pk() -> sa.Column:
    """Server-generated UUID primary key column (requires pgcrypto)."""
    return sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()"))


def _timestamps() -> list:
    """Fresh created_at/updated_at column pair with UTC server defaults."""
    return [
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("timezone('utc', now())"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("timezone('utc', now())"), nullable=False),
    ]


def _create_users() -> None:
    """User accounts; role defaults to 'player'."""
    op.create_table(
        "users",
        _uuid_pk(),
        sa.Column("username", sa.String(length=64), nullable=False, unique=True),
        sa.Column("email", sa.String(length=255), nullable=True, unique=True),
        sa.Column("full_name", sa.String(length=128), nullable=True),
        sa.Column("password_hash", sa.String(length=256), nullable=False),
        sa.Column("role", sa.String(length=32), nullable=False, server_default="player"),
        sa.Column("preferences", sa.Text(), nullable=True),
        *_timestamps(),
    )


def _create_stations() -> None:
    """Stations with a PostGIS point location and a GiST spatial index."""
    op.create_table(
        "stations",
        _uuid_pk(),
        sa.Column("osm_id", sa.String(length=32), nullable=True),
        sa.Column("name", sa.String(length=128), nullable=False),
        sa.Column("code", sa.String(length=16), nullable=True),
        sa.Column("location", Geometry(geometry_type="POINT", srid=4326), nullable=False),
        sa.Column("elevation_m", sa.Float(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.text("true")),
        *_timestamps(),
    )
    op.create_index("ix_stations_location", "stations", ["location"], postgresql_using="gist")


def _create_tracks() -> None:
    """Tracks connecting two stations; geometry indexed with GiST.

    NOTE(review): uq_tracks_station_pair forbids a second track on the same
    (start, end) ordered pair but still allows the reversed pair — confirm
    that matches the intended data model.
    """
    op.create_table(
        "tracks",
        _uuid_pk(),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column("start_station_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("end_station_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("length_meters", sa.Numeric(10, 2), nullable=True),
        sa.Column("max_speed_kph", sa.Integer(), nullable=True),
        sa.Column("is_bidirectional", sa.Boolean(), nullable=False, server_default=sa.text("true")),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="planned"),
        sa.Column("track_geometry", Geometry(geometry_type="LINESTRING", srid=4326), nullable=False),
        *_timestamps(),
        sa.ForeignKeyConstraint(["start_station_id"], ["stations.id"], ondelete="RESTRICT"),
        sa.ForeignKeyConstraint(["end_station_id"], ["stations.id"], ondelete="RESTRICT"),
        sa.UniqueConstraint("start_station_id", "end_station_id", name="uq_tracks_station_pair"),
    )
    op.create_index("ix_tracks_geometry", "tracks", ["track_geometry"], postgresql_using="gist")


def _create_trains() -> None:
    """Rolling stock; operator/home station links survive deletion via SET NULL."""
    op.create_table(
        "trains",
        _uuid_pk(),
        sa.Column("designation", sa.String(length=64), nullable=False, unique=True),
        sa.Column("operator_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column("home_station_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column("capacity", sa.Integer(), nullable=False),
        sa.Column("max_speed_kph", sa.Integer(), nullable=False),
        sa.Column("consist", sa.Text(), nullable=True),
        *_timestamps(),
        sa.ForeignKeyConstraint(["operator_id"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["home_station_id"], ["stations.id"], ondelete="SET NULL"),
    )


def _create_train_schedules() -> None:
    """Ordered station stops per train; cascades away with the train/station."""
    op.create_table(
        "train_schedules",
        _uuid_pk(),
        sa.Column("train_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("sequence_index", sa.Integer(), nullable=False),
        sa.Column("station_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("scheduled_arrival", sa.DateTime(timezone=True), nullable=True),
        sa.Column("scheduled_departure", sa.DateTime(timezone=True), nullable=True),
        sa.Column("dwell_seconds", sa.Integer(), nullable=True),
        *_timestamps(),
        sa.ForeignKeyConstraint(["train_id"], ["trains.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["station_id"], ["stations.id"], ondelete="CASCADE"),
        sa.UniqueConstraint("train_id", "sequence_index", name="uq_train_schedule_sequence"),
    )


def upgrade() -> None:
    """Create extensions, core tables, and spatial indexes for the initial schema."""
    # PostGIS provides the geometry types; pgcrypto provides gen_random_uuid().
    op.execute("CREATE EXTENSION IF NOT EXISTS postgis")
    op.execute("CREATE EXTENSION IF NOT EXISTS pgcrypto")

    # Dependency order: users/stations first, then tables that reference them.
    _create_users()
    _create_stations()
    _create_tracks()
    _create_trains()
    _create_train_schedules()
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the initial schema in reverse dependency order.

    The postgis/pgcrypto extensions are deliberately left installed: upgrade()
    created them with IF NOT EXISTS, so they may predate this migration or be
    shared by other database objects, and dropping them here could break
    unrelated consumers (or fail outright on dependent objects).
    """
    op.drop_table("train_schedules")
    op.drop_table("trains")
    op.drop_index("ix_tracks_geometry", table_name="tracks")
    op.drop_table("tracks")
    op.drop_index("ix_stations_location", table_name="stations")
    op.drop_table("stations")
    op.drop_table("users")
|
||||
Reference in New Issue
Block a user