ci: add Forgejo Actions workflow with ruff, pytest, JSON + link checks
- .forgejo/workflows/ci.yml: four jobs (lint, test, validate-json, markdown-links) running on push to main and on pull requests
- pyproject.toml: project metadata, flask dep, dev extras (ruff, pytest), ruff config (E/F/I/W/B/UP rulesets, 100-char lines, py311 target), pytest config (pythonpath=webinstaller so tests can import drives)
- tests/test_drives.py: 11 unit tests covering parse_size_gb (TB/GB/MB, European comma decimal, empty input, unknown units), drive type scoring (nvme/ssd/hdd), size scoring bands, and score_device summing
- .gitignore: ignore .pytest_cache, *.egg-info, .ruff_cache
- webinstaller/drives.py: refactor subprocess.run to capture_output kwarg (ruff UP022) — drops four lines, same behavior
- webinstaller/app.py: ruff-sorted imports (isort)

All checks pass locally: ruff check + format, pytest 11/11, JSON valid.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
7759574481
commit
852efdb0ed
6 changed files with 166 additions and 5 deletions
60
.forgejo/workflows/ci.yml
Normal file
60
.forgejo/workflows/ci.yml
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
tags: ['**']
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
- name: Install ruff
|
||||||
|
run: pip install ruff
|
||||||
|
- name: Lint
|
||||||
|
run: ruff check .
|
||||||
|
- name: Format check
|
||||||
|
run: ruff format --check .
|
||||||
|
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
- name: Install project with dev extras
|
||||||
|
run: pip install -e ".[dev]"
|
||||||
|
- name: Run pytest
|
||||||
|
run: pytest -v
|
||||||
|
|
||||||
|
validate-json:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
- name: Validate all JSON files
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
for f in $(find . -name '*.json' -not -path './.venv/*' -not -path './node_modules/*'); do
|
||||||
|
echo "Validating $f"
|
||||||
|
python -m json.tool "$f" > /dev/null
|
||||||
|
done
|
||||||
|
|
||||||
|
markdown-links:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Check markdown links
|
||||||
|
uses: lycheeverse/lychee-action@v2
|
||||||
|
with:
|
||||||
|
args: --verbose --no-progress --max-concurrency 4 './**/*.md'
|
||||||
|
fail: false
|
||||||
|
continue-on-error: true
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -1,6 +1,9 @@
|
||||||
*.venv/
|
*.venv/
|
||||||
__pycache__/
|
__pycache__/
|
||||||
*.pyc
|
*.pyc
|
||||||
|
.pytest_cache/
|
||||||
|
*.egg-info/
|
||||||
|
.ruff_cache/
|
||||||
|
|
||||||
# Real credentials must never be committed — use the .example files
|
# Real credentials must never be committed — use the .example files
|
||||||
archinstall/user_credentials.json
|
archinstall/user_credentials.json
|
||||||
|
|
|
||||||
43
pyproject.toml
Normal file
43
pyproject.toml
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
[project]
|
||||||
|
name = "homebase"
|
||||||
|
version = "26.0-alpha"
|
||||||
|
description = "Open-source home server OS — simple enough for everyone."
|
||||||
|
requires-python = ">=3.11"
|
||||||
|
readme = "README.md"
|
||||||
|
license = { text = "AGPL-3.0-or-later" }
|
||||||
|
authors = [
|
||||||
|
{ name = "Daniel Syrnicki" },
|
||||||
|
{ name = "Robert Syrnicki" },
|
||||||
|
]
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
"flask>=3.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"ruff>=0.6",
|
||||||
|
"pytest>=8.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 100
|
||||||
|
target-version = "py311"
|
||||||
|
extend-exclude = [".venv", "*.venv"]
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = [
|
||||||
|
"E", # pycodestyle errors
|
||||||
|
"F", # pyflakes
|
||||||
|
"I", # isort
|
||||||
|
"W", # pycodestyle warnings
|
||||||
|
"B", # flake8-bugbear
|
||||||
|
"UP", # pyupgrade
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
pythonpath = ["webinstaller"]
|
||||||
|
|
||||||
|
[tool.setuptools]
|
||||||
|
py-modules = []
|
||||||
57
tests/test_drives.py
Normal file
57
tests/test_drives.py
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
from drives import (
|
||||||
|
get_drive_type_score,
|
||||||
|
get_size_score,
|
||||||
|
parse_size_gb,
|
||||||
|
score_device,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_terabytes():
|
||||||
|
assert parse_size_gb("1T") == 1024.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_gigabytes():
|
||||||
|
assert parse_size_gb("500G") == 500.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_megabytes():
|
||||||
|
assert parse_size_gb("2048M") == 2.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_european_comma_decimal():
|
||||||
|
assert parse_size_gb("1,5T") == 1.5 * 1024
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_empty_returns_none():
|
||||||
|
assert parse_size_gb("") is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_size_gb_unknown_unit_returns_none():
|
||||||
|
assert parse_size_gb("500K") is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_drive_type_score_nvme():
|
||||||
|
assert get_drive_type_score("/dev/nvme0n1") == 15
|
||||||
|
|
||||||
|
|
||||||
|
def test_drive_type_score_ssd():
|
||||||
|
assert get_drive_type_score("/dev/ssd0") == 10
|
||||||
|
|
||||||
|
|
||||||
|
def test_drive_type_score_hdd_fallback():
|
||||||
|
assert get_drive_type_score("/dev/sda") == 5
|
||||||
|
|
||||||
|
|
||||||
|
def test_size_score_bands():
|
||||||
|
assert get_size_score(None) == 5
|
||||||
|
assert get_size_score(64) == 5
|
||||||
|
assert get_size_score(256) == 7
|
||||||
|
assert get_size_score(1024) == 10
|
||||||
|
|
||||||
|
|
||||||
|
def test_score_device_sums_type_and_size(monkeypatch):
|
||||||
|
import drives
|
||||||
|
|
||||||
|
monkeypatch.setattr(drives, "get_drive_health", lambda _: 10)
|
||||||
|
assert score_device("/dev/nvme0n1", 1024) == 15 + 10 + 10
|
||||||
|
assert score_device("/dev/sda", 64) == 5 + 10 + 5
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
from flask import Flask, render_template, request, redirect, url_for
|
|
||||||
from drives import list_scored_devices
|
from drives import list_scored_devices
|
||||||
|
from flask import Flask, redirect, render_template, request, url_for
|
||||||
|
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,8 +5,7 @@ def get_drive_health(device):
|
||||||
try:
|
try:
|
||||||
result = subprocess.run(
|
result = subprocess.run(
|
||||||
["smartctl", "-H", device],
|
["smartctl", "-H", device],
|
||||||
stdout=subprocess.PIPE,
|
capture_output=True,
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
)
|
)
|
||||||
output = result.stdout.decode()
|
output = result.stdout.decode()
|
||||||
if "PASSED" in output:
|
if "PASSED" in output:
|
||||||
|
|
@ -61,8 +60,7 @@ def list_scored_devices():
|
||||||
try:
|
try:
|
||||||
result = subprocess.run(
|
result = subprocess.run(
|
||||||
["lsblk", "-dn", "-o", "NAME,SIZE"],
|
["lsblk", "-dn", "-o", "NAME,SIZE"],
|
||||||
stdout=subprocess.PIPE,
|
capture_output=True,
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
text=True,
|
text=True,
|
||||||
check=True,
|
check=True,
|
||||||
)
|
)
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue