chore: remove CI workflow file and update test files for improved structure and functionality
Some checks failed
CI / lint (push) Successful in 15s
CI / test (push) Failing after 16s
CI / build (push) Has been skipped
CI / deploy (push) Has been skipped

2025-11-14 13:25:02 +01:00
parent e5e346b26a
commit e9678b6736
4 changed files with 19 additions and 220 deletions

View File

@@ -1,212 +0,0 @@
name: CI

on:
  push:
    branches: [main, develop, v2]
  pull_request:
    branches: [main, develop]

jobs:
  lint:
    runs-on: ubuntu-latest
    env:
      APT_CACHER_NG: http://192.168.88.14:3142
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"
      # - name: Cache pip dependencies
      #   uses: actions/cache@v4
      #   with:
      #     path: /root/.cache/pip
      #     key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'requirements-test.txt', 'pyproject.toml') }}
      #     restore-keys: |
      #       ${{ runner.os }}-pip-
      - name: Configure apt proxy
        run: |
          if [ -n "${APT_CACHER_NG}" ]; then
            echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
          fi
      - name: Install system packages
        run: |
          apt-get update
          apt-get install -y build-essential libpq-dev
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt
      - name: Run Ruff
        run: ruff check .
      - name: Run Black
        run: black --check .
      - name: Run bandit
        run: bandit -c pyproject.toml -r tests
  test:
    runs-on: ubuntu-latest
    needs: lint
    env:
      APT_CACHER_NG: http://192.168.88.14:3142
      DB_DRIVER: postgresql+psycopg2
      DB_HOST: 192.168.88.35
      DB_NAME: calminer_test
      DB_USER: calminer
      DB_PASSWORD: calminer_password
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: ${{ env.DB_USER }}
          POSTGRES_PASSWORD: ${{ env.DB_PASSWORD }}
          POSTGRES_DB: ${{ env.DB_NAME }}
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "path=$(pip cache dir)" >> $GITEA_OUTPUT
          echo "Pip cache dir: $(pip cache dir)"
      # - name: Cache pip dependencies
      #   uses: actions/cache@v4
      #   with:
      #     path: /root/.cache/pip
      #     key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt', 'requirements-test.txt', 'pyproject.toml') }}
      #     restore-keys: |
      #       ${{ runner.os }}-pip-
      - name: Configure apt proxy
        run: |
          if [ -n "${APT_CACHER_NG}" ]; then
            echo "Acquire::http::Proxy \"${APT_CACHER_NG}\";" | tee /etc/apt/apt.conf.d/01apt-cacher-ng
          fi
      - name: Install system packages
        run: |
          apt-get update
          apt-get install -y build-essential libpq-dev
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt
      - name: Run tests
        env:
          DATABASE_DRIVER: ${{ env.DB_DRIVER }}
          DATABASE_HOST: postgres
          DATABASE_PORT: 5432
          DATABASE_USER: ${{ env.DB_USER }}
          DATABASE_PASSWORD: ${{ env.DB_PASSWORD }}
          DATABASE_NAME: ${{ env.DB_NAME }}
        run: |
          pytest --cov=. --cov-report=term-missing --cov-report=xml --junitxml=pytest-report.xml
      - name: Upload test artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-artifacts
          path: |
            coverage.xml
            pytest-report.xml
  build:
    runs-on: ubuntu-latest
    needs:
      - lint
      - test
    env:
      DEFAULT_BRANCH: main
      REGISTRY_URL: ${{ secrets.REGISTRY_URL }}
      REGISTRY_USERNAME: ${{ secrets.REGISTRY_USERNAME }}
      REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
      REGISTRY_CONTAINER_NAME: calminer
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Collect workflow metadata
        id: meta
        shell: bash
        run: |
          ref_name="${GITHUB_REF_NAME:-${GITHUB_REF##*/}}"
          event_name="${GITHUB_EVENT_NAME:-}"
          sha="${GITHUB_SHA:-}"
          if [ "$ref_name" = "${DEFAULT_BRANCH:-main}" ]; then
            echo "on_default=true" >> "$GITHUB_OUTPUT"
          else
            echo "on_default=false" >> "$GITHUB_OUTPUT"
          fi
          echo "ref_name=$ref_name" >> "$GITHUB_OUTPUT"
          echo "event_name=$event_name" >> "$GITHUB_OUTPUT"
          echo "sha=$sha" >> "$GITHUB_OUTPUT"
      - name: Set up QEMU and Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to gitea registry
        if: ${{ steps.meta.outputs.on_default == 'true' }}
        uses: docker/login-action@v3
        continue-on-error: true
        with:
          registry: ${{ env.REGISTRY_URL }}
          username: ${{ env.REGISTRY_USERNAME }}
          password: ${{ env.REGISTRY_PASSWORD }}
      - name: Build image
        id: build-image
        env:
          REGISTRY_URL: ${{ env.REGISTRY_URL }}
          REGISTRY_CONTAINER_NAME: ${{ env.REGISTRY_CONTAINER_NAME }}
          SHA_TAG: ${{ steps.meta.outputs.sha }}
          PUSH_IMAGE: ${{ steps.meta.outputs.on_default == 'true' && steps.meta.outputs.event_name != 'pull_request' && env.REGISTRY_URL != '' && env.REGISTRY_USERNAME != '' && env.REGISTRY_PASSWORD != '' }}
        run: |
          set -eo pipefail
          LOG_FILE=build.log
          if [ "${PUSH_IMAGE}" = "true" ]; then
            docker buildx build \
              --push \
              --tag "${REGISTRY_URL}/allucanget/${REGISTRY_CONTAINER_NAME}:latest" \
              --tag "${REGISTRY_URL}/allucanget/${REGISTRY_CONTAINER_NAME}:${SHA_TAG}" \
              --file Dockerfile \
              . 2>&1 | tee "${LOG_FILE}"
          else
            docker buildx build \
              --load \
              --tag "${REGISTRY_CONTAINER_NAME}:ci" \
              --file Dockerfile \
              . 2>&1 | tee "${LOG_FILE}"
          fi
      - name: Upload docker build logs
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: docker-build-logs
          path: build.log
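
Note: the removed test job only hands the suite a set of DATABASE_* variables. One way a test harness could turn those into a SQLAlchemy engine is sketched below; only the variable names come from the workflow above, while the helper name and the fallback defaults are assumptions, not code from this repository.

# Hypothetical helper (not part of the repository): assemble a SQLAlchemy
# engine from the DATABASE_* variables the removed test job exported.
import os

from sqlalchemy import create_engine
from sqlalchemy.engine import URL


def engine_from_ci_env():
    # Defaults mirror the values hard-coded in the workflow's env block.
    url = URL.create(
        drivername=os.environ.get("DATABASE_DRIVER", "postgresql+psycopg2"),
        username=os.environ.get("DATABASE_USER"),
        password=os.environ.get("DATABASE_PASSWORD"),
        host=os.environ.get("DATABASE_HOST", "localhost"),
        port=int(os.environ.get("DATABASE_PORT", "5432")),
        database=os.environ.get("DATABASE_NAME"),
    )
    return create_engine(url)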

View File

@@ -71,7 +71,8 @@ def navigation_client() -> Tuple[TestClient, StubNavigationService, AuthSession]
     user = cast(User, object())
     session = AuthSession(
-        tokens=SessionTokens(access_token=secrets.token_urlsafe(16), refresh_token=None),
+        tokens=SessionTokens(
+            access_token=secrets.token_urlsafe(16), refresh_token=None),
         user=user,
         role_slugs=("viewer",),
     )
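
For context, a fixture shaped like this is normally wired into the app through FastAPI's dependency_overrides, the same mechanism the login tests below use. A rough sketch under that assumption follows; make_stub_client is an invented name, while the other identifiers come from imports visible in these diffs.

# Sketch only: build a TestClient whose routes see the stubbed AuthSession.
import secrets
from typing import cast

from fastapi.testclient import TestClient

from dependencies import get_auth_session
from main import app
from models import User
from services.session import AuthSession, SessionTokens


def make_stub_client() -> TestClient:
    session = AuthSession(
        tokens=SessionTokens(
            access_token=secrets.token_urlsafe(16), refresh_token=None),
        user=cast(User, object()),
        role_slugs=("viewer",),
    )
    # Routes depending on get_auth_session now receive the stub session.
    app.dependency_overrides[get_auth_session] = lambda: session
    return TestClient(app)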

View File

@@ -11,8 +11,8 @@ from sqlalchemy import select
 from sqlalchemy.orm import Session, sessionmaker
 from models import Role, User, UserRole
-from dependencies import get_auth_session, require_current_user
-from services.security import hash_password
+from dependencies import get_auth_session, get_jwt_settings, require_current_user
+from services.security import decode_access_token, hash_password
 from services.session import AuthSession, SessionTokens
 from tests.utils.security import random_password, random_token
@@ -334,6 +334,7 @@ class TestLoginFlowEndToEnd:
         # Override to anonymous for login
         app = cast(FastAPI, client.app)
+        original_override = app.dependency_overrides.get(get_auth_session)
         app.dependency_overrides[get_auth_session] = lambda: AuthSession.anonymous(
         )
         try:
@@ -347,14 +348,21 @@ class TestLoginFlowEndToEnd:
"location") == "http://testserver/"
set_cookie_header = login_response.headers.get("set-cookie", "")
assert "calminer_access_token=" in set_cookie_header
# Now with cookies, GET / should show dashboard
dashboard_response = client.get("/")
assert dashboard_response.status_code == 200
assert "Dashboard" in dashboard_response.text or "metrics" in dashboard_response.text
finally:
app.dependency_overrides.pop(get_auth_session, None)
access_cookie = client.cookies.get("calminer_access_token")
refresh_cookie = client.cookies.get("calminer_refresh_token")
assert access_cookie, "Access token cookie was not set"
assert refresh_cookie, "Refresh token cookie was not set"
jwt_settings = get_jwt_settings()
payload = decode_access_token(access_cookie, jwt_settings)
assert payload.sub == str(user.id)
assert payload.scopes == ["auth"], "Unexpected access token scopes"
if original_override is not None:
app.dependency_overrides[get_auth_session] = original_override
def test_logout_redirects_to_login_and_clears_session(self, client: TestClient) -> None:
# Assuming authenticated from conftest
logout_response = client.get("/logout", follow_redirects=False)
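
The new assertions decode the access-token cookie and check its sub and scopes claims, but decode_access_token itself is not part of this diff. Below is a sketch of the shape the test appears to rely on, assuming a PyJWT-based implementation; the settings attributes (secret_key, algorithm) are invented for illustration.

# Illustrative only - the real services.security.decode_access_token is not
# shown in this commit. Assumes PyJWT and a small payload object exposing
# `sub` and `scopes`, which is all the test above asserts on.
from dataclasses import dataclass, field
from typing import List

import jwt  # PyJWT


@dataclass
class AccessTokenPayload:
    sub: str
    scopes: List[str] = field(default_factory=list)


def decode_access_token(token: str, settings) -> AccessTokenPayload:
    # `settings.secret_key` and `settings.algorithm` are assumed attribute
    # names for whatever get_jwt_settings() returns.
    data = jwt.decode(token, settings.secret_key, algorithms=[settings.algorithm])
    return AccessTokenPayload(sub=data["sub"], scopes=list(data.get("scopes", [])))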

View File

@@ -1,6 +1,7 @@
 from fastapi.testclient import TestClient
 from main import app
+from scripts.init_db import init_db
 def test_login_form_post_does_not_trigger_json_error():
@@ -8,6 +9,7 @@ def test_login_form_post_does_not_trigger_json_error():
     the JSON "Invalid JSON payload" error which indicates the middleware
     attempted to parse non-JSON bodies.
     """
+    init_db()
     client = TestClient(app)
     resp = client.post(