From 3a8fad518587edbe817989c99aaf60ee66ab256e Mon Sep 17 00:00:00 2001 From: Daniel Maksymilian Syrnicki Date: Mon, 20 Apr 2026 14:16:02 +0200 Subject: [PATCH] feat(catalog): on-box apps catalog synced independently of core version MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit New `furtka catalog sync` pulls the latest daniel/furtka-apps release, verifies its sha256, extracts under /var/lib/furtka/catalog/, and atomically swaps into place — so apps can ship without cutting a new Furtka core release. A daily timer (furtka-catalog-sync.timer, 10 min post-boot + 24 h with ±6 h jitter) drives the sync; /apps gets a manual "Sync apps catalog" button that kicks the same code path via a detached systemd-run unit. Layout of the new on-box tree: /var/lib/furtka/catalog/ synced catalog (survives self-updates) ├── VERSION └── apps// ... /var/lib/furtka/catalog-state.json sync stage + last version, UI-polled /run/furtka/catalog.lock flock so timer + manual click can't race Resolver precedence (furtka/sources.py): catalog wins over the bundled seed (/opt/furtka/current/apps/, carried by the core release for offline first-boot). Installed apps under /var/lib/furtka/apps/ are never auto- swapped — user clicks Reinstall to move an existing install onto a newer catalog version; settings merge-preserved via the existing installer.install_from path. New files: - furtka/_release_common.py — shared Forgejo/tarball primitives lifted from furtka/updater.py. Both modules now import from here; updater's behaviour and public API unchanged. - furtka/catalog.py — check_catalog(), sync_catalog() with staging + manifest validation + atomic rename. Refuses bad sha256 / broken manifests and leaves the live catalog intact on any failure path. - furtka/sources.py — resolve_app_name() / list_available() abstraction used by installer.resolve_source and api._list_available. - assets/systemd/furtka-catalog-sync.{service,timer} — oneshot service + daily timer. 
Timer auto-enables on self-update via a one-line addition to _link_new_units (fresh installs get enabled via the webinstaller's _FURTKA_UNITS list). API + UI: - /api/bundled renamed internally to _list_available; endpoint stays as a backcompat alias; /api/apps/available is the new canonical name. Each list entry carries a `source` field ("catalog" | "bundled"). - POST /api/catalog/sync/check + /apply + GET /api/catalog/status. - /apps page grows a catalog-status row + Sync button; poll loop mirrors the Furtka self-update flow. CLI: `furtka catalog sync [--check]` + `furtka catalog status` (both support --json). Old `furtka app install` / `reconcile` / `update` / `rollback` surfaces are unchanged. Test gate: 194/170 baseline + 24 new tests covering catalog sync (happy path, sha256 mismatch, invalid manifest, lock contention, preserves-on-failure) + resolver precedence + api renames. ruff check + format clean. Co-Authored-By: Claude Opus 4.7 (1M context) --- assets/systemd/furtka-catalog-sync.service | 12 + assets/systemd/furtka-catalog-sync.timer | 14 + assets/www/style.css | 17 ++ furtka/_release_common.py | 115 +++++++ furtka/api.py | 183 +++++++++-- furtka/catalog.py | 253 ++++++++++++++++ furtka/cli.py | 84 ++++++ furtka/installer.py | 14 +- furtka/paths.py | 12 + furtka/sources.py | 75 +++++ furtka/updater.py | 93 ++---- tests/test_api.py | 74 ++++- tests/test_catalog.py | 333 +++++++++++++++++++++ tests/test_sources.py | 108 +++++++ tests/test_updater.py | 18 +- webinstaller/app.py | 4 + 16 files changed, 1299 insertions(+), 110 deletions(-) create mode 100644 assets/systemd/furtka-catalog-sync.service create mode 100644 assets/systemd/furtka-catalog-sync.timer create mode 100644 furtka/_release_common.py create mode 100644 furtka/catalog.py create mode 100644 furtka/sources.py create mode 100644 tests/test_catalog.py create mode 100644 tests/test_sources.py diff --git a/assets/systemd/furtka-catalog-sync.service b/assets/systemd/furtka-catalog-sync.service 
new file mode 100644 index 0000000..fa0307c --- /dev/null +++ b/assets/systemd/furtka-catalog-sync.service @@ -0,0 +1,12 @@ +[Unit] +Description=Furtka apps catalog sync +Requires=network-online.target +After=network-online.target + +[Service] +Type=oneshot +ExecStart=/usr/local/bin/furtka catalog sync +TimeoutStartSec=5min + +[Install] +WantedBy=multi-user.target diff --git a/assets/systemd/furtka-catalog-sync.timer b/assets/systemd/furtka-catalog-sync.timer new file mode 100644 index 0000000..3561350 --- /dev/null +++ b/assets/systemd/furtka-catalog-sync.timer @@ -0,0 +1,14 @@ +[Unit] +Description=Furtka apps catalog daily sync + +[Timer] +# First sync 10 min after boot, then once per day with up to 6 h jitter so +# a fleet of boxes doesn't all hit Forgejo at the same second. Persistent +# = catch up if the box was off when the timer should have fired. +OnBootSec=10min +OnUnitActiveSec=24h +RandomizedDelaySec=6h +Persistent=true + +[Install] +WantedBy=timers.target diff --git a/assets/www/style.css b/assets/www/style.css index 4aa1671..1fcedef 100644 --- a/assets/www/style.css +++ b/assets/www/style.css @@ -219,6 +219,23 @@ button.danger { background: var(--danger); color: #fff; } button:disabled { opacity: 0.5; cursor: wait; } button:focus-visible { outline: none; box-shadow: var(--ring); } .empty { color: var(--muted); font-style: italic; padding: 0.5rem 0; } +.catalog-row { + display: flex; + justify-content: space-between; + align-items: center; + flex-wrap: wrap; + gap: 0.75rem; + padding: 0.5rem 0 0.75rem; +} +.catalog-state { + margin: 0; + color: var(--muted); + font-size: 0.9rem; +} +.catalog-stage.pending { + color: var(--fg); + font-style: italic; +} pre { background: var(--card); padding: 1rem; diff --git a/furtka/_release_common.py b/furtka/_release_common.py new file mode 100644 index 0000000..683f62d --- /dev/null +++ b/furtka/_release_common.py @@ -0,0 +1,115 @@ +"""Shared primitives for release-tarball flows. 
+ +Both ``furtka.updater`` (core self-update) and ``furtka.catalog`` (apps +catalog sync) pull a tarball from a Forgejo Releases page, verify its +SHA256 against the ``.sha256`` sidecar, and extract it with a path- +traversal guard. The helpers here are the single implementation of +that dance. + +Each error-raising helper accepts an ``error_cls`` kwarg so callers can +keep their domain-specific exception type (``UpdateError``, +``CatalogError``) at call sites — the helper itself defaults to a +neutral ``ReleaseError`` for use in tests or standalone scripts. +""" + +from __future__ import annotations + +import hashlib +import json +import shutil +import tarfile +import urllib.error +import urllib.request +from pathlib import Path + + +class ReleaseError(RuntimeError): + """Neutral failure for release-tarball operations.""" + + +def forgejo_api(host: str, repo: str, path: str, *, error_cls: type = ReleaseError) -> dict | list: + url = f"https://{host}/api/v1/repos/{repo}{path}" + req = urllib.request.Request(url, headers={"Accept": "application/json"}) + try: + with urllib.request.urlopen(req, timeout=15) as resp: + return json.loads(resp.read()) + except (urllib.error.URLError, json.JSONDecodeError) as e: + raise error_cls(f"forgejo api {url}: {e}") from e + + +def download(url: str, dest: Path, *, error_cls: type = ReleaseError) -> None: + dest.parent.mkdir(parents=True, exist_ok=True) + req = urllib.request.Request(url) + try: + with urllib.request.urlopen(req, timeout=60) as resp, dest.open("wb") as f: + shutil.copyfileobj(resp, f) + except urllib.error.URLError as e: + raise error_cls(f"download {url}: {e}") from e + + +def sha256_of(path: Path) -> str: + h = hashlib.sha256() + with path.open("rb") as f: + for chunk in iter(lambda: f.read(1024 * 1024), b""): + h.update(chunk) + return h.hexdigest() + + +def verify_tarball(tarball: Path, expected_sha: str, *, error_cls: type = ReleaseError) -> None: + actual = sha256_of(tarball) + if actual != expected_sha: + 
raise error_cls(f"sha256 mismatch: expected {expected_sha}, got {actual}") + + +def parse_sha256_sidecar(text: str, *, error_cls: type = ReleaseError) -> str: + """Extract the hash from a standard `sha256sum` sidecar line.""" + line = text.strip().split("\n", 1)[0].strip() + if not line: + raise error_cls("empty sha256 sidecar") + return line.split()[0] + + +def extract_tarball(tarball: Path, dest: Path, *, error_cls: type = ReleaseError) -> str: + """Extract the tarball and return the VERSION read from its root. + + Refuses entries that could escape ``dest`` via absolute paths or ``..`` + segments. On Python 3.12+ the stricter ``data`` filter is additionally + enabled to catch symlink-escape / device-node / setuid tricks that the + regex check can't see. + """ + dest.mkdir(parents=True, exist_ok=True) + with tarfile.open(tarball, "r:gz") as tf: + for member in tf.getmembers(): + if member.name.startswith(("/", "..")) or ".." in Path(member.name).parts: + raise error_cls(f"refusing tarball entry {member.name!r}") + try: + tf.extractall(dest, filter="data") + except TypeError: + tf.extractall(dest) + version_file = dest / "VERSION" + if not version_file.is_file(): + raise error_cls("tarball has no VERSION file at root") + return version_file.read_text().strip() + + +def version_tuple(v: str) -> tuple: + """CalVer comparator: 26.1-alpha < 26.1-beta < 26.1-rc < 26.1 < 26.2-alpha. + + Pre-release stages sort before the corresponding stable (no-suffix) + release. Unknown suffixes sort below everything except the malformed + fallback. Returns a tuple of (year, release, stage_rank, suffix). 
+ """ + stage_rank = {"alpha": 0, "beta": 1, "rc": 2} + head, _, suffix = v.partition("-") + try: + year_str, release_str = head.split(".", 1) + year = int(year_str) + release = int(release_str) + except (ValueError, IndexError): + return (-1, -1, -1, v) + if not suffix: + return (year, release, 3, "") + for name, rank in stage_rank.items(): + if suffix.startswith(name): + return (year, release, rank, suffix) + return (year, release, -1, suffix) diff --git a/furtka/api.py b/furtka/api.py index f0249fa..71523df 100644 --- a/furtka/api.py +++ b/furtka/api.py @@ -15,9 +15,9 @@ import json import re from http.server import BaseHTTPRequestHandler, HTTPServer -from furtka import dockerops, installer, reconciler +from furtka import dockerops, installer, reconciler, sources from furtka.manifest import ManifestError, load_manifest -from furtka.paths import apps_dir, bundled_apps_dir +from furtka.paths import apps_dir from furtka.scanner import scan _ICON_MAX_BYTES = 16 * 1024 @@ -88,6 +88,10 @@ _HTML = """

Available to install

+
+

Catalog version · last sync never

+ +
@@ -316,7 +320,56 @@ async function handleButton(op, name, btn) { await refresh(); } +async function refreshCatalog() { + let status; + try { + status = await fetch('/api/catalog/status').then(r => r.json()); + } catch (e) { + return; + } + const cur = status.current || 'never synced'; + document.getElementById('catalog-current').textContent = cur; + const stage = (status.state || {}).stage || ''; + const updatedAt = (status.state || {}).updated_at || ''; + document.getElementById('catalog-last-sync').textContent = updatedAt || 'never'; + const stageEl = document.getElementById('catalog-stage'); + if (stage && stage !== 'done') { + stageEl.textContent = '· ' + stage + '…'; + stageEl.classList.add('pending'); + } else { + stageEl.textContent = ''; + stageEl.classList.remove('pending'); + } +} + +const catalogBtn = document.getElementById('catalog-sync-btn'); +catalogBtn.addEventListener('click', async () => { + catalogBtn.disabled = true; + const original = catalogBtn.textContent; + catalogBtn.textContent = 'Syncing…'; + try { + const r = await fetch('/api/catalog/sync/apply', {method: 'POST'}); + const data = await r.json(); + document.getElementById('log').textContent = `[catalog sync] HTTP ${r.status}\\n` + JSON.stringify(data, null, 2); + // Poll for completion — sync is fast (KB-range tarball) so 30 s is plenty. 
+ const deadline = Date.now() + 30000; + while (Date.now() < deadline) { + await new Promise(res => setTimeout(res, 1500)); + const s = await fetch('/api/catalog/status').then(r => r.json()).catch(() => null); + const stage = (s && s.state && s.state.stage) || ''; + if (stage === 'done' || stage === 'error') break; + } + await refreshCatalog(); + await refresh(); + } catch (e) { + document.getElementById('log').textContent = `[catalog sync] network error: ${e.message}`; + } + catalogBtn.disabled = false; + catalogBtn.textContent = original; +}); + refresh(); +refreshCatalog(); @@ -349,28 +402,31 @@ def _list_installed(): return out -def _list_bundled(): +def _list_available(): + """Apps available to install — catalog union bundled, catalog wins on collision. + + Each entry carries a `"source"` field (`"catalog"` | `"bundled"`) so the + UI can visually differentiate later. Already-installed apps are filtered + out so the UI shows them only in the installed list. + """ installed_names = {r.path.name for r in scan(apps_dir()) if r.ok} - bundled = bundled_apps_dir() - if not bundled.exists(): - return [] out = [] - for entry in sorted(bundled.iterdir()): - if not entry.is_dir() or entry.name in installed_names: - continue - manifest_path = entry / "manifest.json" - if not manifest_path.exists(): + for app_source in sources.list_available(): + if app_source.path.name in installed_names: continue + manifest_path = app_source.path / "manifest.json" try: m = load_manifest(manifest_path) except ManifestError: continue - out.append(_manifest_summary(m, entry)) + summary = _manifest_summary(m, app_source.path) + summary["source"] = app_source.origin + out.append(summary) return out def _load_manifest_for(name): - """Return (manifest, env_values, installed_bool) for an installed or bundled app. + """Return (manifest, env_values, installed_bool) for an installed or bundled/catalog app. Returns (None, None, False) if the name doesn't resolve anywhere. 
""" @@ -382,13 +438,13 @@ def _load_manifest_for(name): return None, None, False values = installer.read_env_values(target / ".env") return m, values, True - bundled = bundled_apps_dir() / name - if bundled.exists() and (bundled / "manifest.json").exists(): + resolved = sources.resolve_app_name(name) + if resolved is not None: try: - m = load_manifest(bundled / "manifest.json") + m = load_manifest(resolved.path / "manifest.json") except ManifestError: return None, None, False - env_example = bundled / ".env.example" + env_example = resolved.path / ".env.example" values = installer.read_env_values(env_example) if env_example.exists() else {} return m, values, False return None, None, False @@ -583,6 +639,82 @@ def _do_furtka_status(): return 200, updater.read_state() +def _do_catalog_check(): + """Check Forgejo for a newer apps-catalog release. + + Parallels _do_furtka_check: returns current/latest/update_available. + """ + from furtka import catalog + + try: + check = catalog.check_catalog() + except catalog.CatalogError as e: + return 502, {"error": str(e)} + return 200, { + "current": check.current, + "latest": check.latest, + "update_available": check.update_available, + } + + +def _do_catalog_apply(): + """Kick off a catalog sync detached from this process. + + Catalog sync doesn't restart furtka-api, so the lifecycle constraint that + forces the Furtka self-update to detach doesn't strictly apply here — but + using the same systemd-run pattern keeps the two UI flows symmetric and + means a slow network can't tie up the API thread. Client polls + /api/catalog/status the same way it polls /update-state.json. 
+ """ + import subprocess + + from furtka import catalog + + try: + fh = catalog.acquire_lock() + except catalog.CatalogError as e: + return 409, {"error": str(e)} + fh.close() + + try: + subprocess.run( + [ + "systemd-run", + "--unit=furtka-catalog-sync-api", + "--no-block", + "--collect", + "/usr/local/bin/furtka", + "catalog", + "sync", + ], + check=True, + capture_output=True, + text=True, + ) + except FileNotFoundError: + return 502, {"error": "systemd-run not available"} + except subprocess.CalledProcessError as e: + return 502, { + "error": f"systemd-run failed: {(e.stderr or e.stdout or '').strip()}", + } + return 202, {"status": "dispatched", "unit": "furtka-catalog-sync-api"} + + +def _do_catalog_status(): + """Return {current, state} for the apps catalog. + + `current` is the catalog's on-disk VERSION; `state` is whatever was last + written by sync_catalog to catalog-state.json. UI uses both: show the + version next to a last-sync timestamp plus a stage indicator. + """ + from furtka import catalog + + return 200, { + "current": catalog.read_current_catalog_version(), + "state": catalog.read_state(), + } + + def _do_update(name): """Pull newer container images for an installed app; restart if any changed. @@ -652,14 +784,19 @@ class _Handler(BaseHTTPRequestHandler): return self._html(200, _HTML) if self.path == "/api/apps": return self._json(200, _list_installed()) - if self.path == "/api/bundled": - return self._json(200, _list_bundled()) + # /api/bundled is the pre-26.6 name for this list; kept as an alias + # so any external tooling survives the rename to /api/apps/available. 
+ if self.path in ("/api/bundled", "/api/apps/available"): + return self._json(200, _list_available()) if self.path == "/api/furtka/update/status": status, body = _do_furtka_status() return self._json(status, body) if self.path == "/api/furtka/https/status": status, body = _do_https_status() return self._json(status, body) + if self.path == "/api/catalog/status": + status, body = _do_catalog_status() + return self._json(status, body) # /api/apps//settings if self.path.startswith("/api/apps/") and self.path.endswith("/settings"): name = self.path[len("/api/apps/") : -len("/settings")] @@ -709,6 +846,14 @@ class _Handler(BaseHTTPRequestHandler): status, body = _do_https_force(payload) return self._json(status, body) + # Apps catalog: check + apply (daily timer + manual UI button). + if self.path == "/api/catalog/sync/check": + status, body = _do_catalog_check() + return self._json(status, body) + if self.path == "/api/catalog/sync/apply": + status, body = _do_catalog_apply() + return self._json(status, body) + name = payload.get("name") if not isinstance(name, str) or not name: return self._json(400, {"error": "missing or empty 'name' field"}) diff --git a/furtka/catalog.py b/furtka/catalog.py new file mode 100644 index 0000000..5dd8b9d --- /dev/null +++ b/furtka/catalog.py @@ -0,0 +1,253 @@ +"""Furtka apps catalog sync. + +Mirrors the shape of ``furtka.updater`` but targets a separate Forgejo +repo (``daniel/furtka-apps`` by default) whose releases carry a single +``furtka-apps-.tar.gz`` with ``VERSION`` at the root and an +``apps//`` tree underneath. Pulling the catalog keeps the on-box +app ecosystem fresh without requiring a Furtka core release — core +ships a seed ``apps/`` under ``/opt/furtka/current/apps/`` that the +resolver falls back to when the catalog is empty or stale. + +Flow of ``sync_catalog()``: + +1. flock on ``/run/furtka/catalog.lock`` so two triggers (timer + manual + UI click) can't race. +2. 
``check_catalog()`` asks Forgejo for the latest release and picks out + the tarball + sidecar URLs. +3. Download tarball + sidecar to ``/var/lib/furtka/catalog/_downloads/``. +4. Verify the sha256 sidecar against the tarball. +5. Extract into ``/var/lib/furtka/catalog/_staging/``. +6. Validate every ``apps//manifest.json`` via ``furtka.manifest. + load_manifest``. A broken catalog release is refused here, not half- + applied. +7. Atomic rename: existing live catalog → ``catalog.prev/``, staging → + ``catalog/``, then rmtree the prev. Any failure before this step + leaves the live catalog untouched. +8. Write ``/var/lib/furtka/catalog-state.json`` for the UI. + +Paths can be overridden via env vars so tests can redirect everything to +a tmp dir. +""" + +from __future__ import annotations + +import fcntl +import json +import os +import shutil +import time +from dataclasses import dataclass +from pathlib import Path + +from furtka import _release_common as _rc +from furtka.manifest import ManifestError, load_manifest +from furtka.paths import catalog_dir + +FORGEJO_HOST = os.environ.get("FURTKA_FORGEJO_HOST", "forgejo.sourcegate.online") +CATALOG_REPO = os.environ.get("FURTKA_CATALOG_REPO", "daniel/furtka-apps") +_CATALOG_STATE = Path(os.environ.get("FURTKA_CATALOG_STATE", "/var/lib/furtka/catalog-state.json")) +_LOCK_PATH = Path(os.environ.get("FURTKA_CATALOG_LOCK", "/run/furtka/catalog.lock")) + +_STAGING_NAME = "_staging" +_DOWNLOADS_NAME = "_downloads" +_PREV_SUFFIX = ".prev" +_VERSION_FILE = "VERSION" + + +class CatalogError(RuntimeError): + """Any failure in the catalog sync flow that should surface to the caller.""" + + +@dataclass(frozen=True) +class CatalogCheck: + current: str | None + latest: str + update_available: bool + tarball_url: str | None + sha256_url: str | None + + +def state_path() -> Path: + return _CATALOG_STATE + + +def lock_path() -> Path: + return _LOCK_PATH + + +def read_current_catalog_version() -> str | None: + """Return the string in 
/VERSION, or None if absent / unreadable.""" + try: + value = (catalog_dir() / _VERSION_FILE).read_text().strip() + except (FileNotFoundError, NotADirectoryError, OSError): + return None + return value or None + + +def check_catalog() -> CatalogCheck: + """Query Forgejo for the latest catalog release. + + Uses ``/releases?limit=1`` (not ``/releases/latest``) for the same + reason the core updater does — Forgejo's ``latest`` endpoint skips + pre-releases and 404s when every tag carries a suffix. + """ + current = read_current_catalog_version() + releases = _rc.forgejo_api( + FORGEJO_HOST, CATALOG_REPO, "/releases?limit=1", error_cls=CatalogError + ) + if not isinstance(releases, list) or not releases: + raise CatalogError("no catalog releases published yet") + release = releases[0] + latest = str(release.get("tag_name") or "").strip() + if not latest: + raise CatalogError("latest catalog release has empty tag_name") + tarball_url = None + sha256_url = None + for asset in release.get("assets") or []: + name = asset.get("name") or "" + url = asset.get("browser_download_url") or "" + if name.endswith(".tar.gz") and "furtka-apps-" in name: + tarball_url = url + elif name.endswith(".tar.gz.sha256"): + sha256_url = url + available = latest != current and ( + current is None or _rc.version_tuple(latest) > _rc.version_tuple(current) + ) + return CatalogCheck( + current=current, + latest=latest, + update_available=available, + tarball_url=tarball_url, + sha256_url=sha256_url, + ) + + +def write_state(stage: str, **extra) -> None: + """Atomic JSON state write — same shape as updater's update-state.json.""" + state_path().parent.mkdir(parents=True, exist_ok=True) + tmp = state_path().with_suffix(".tmp") + payload = {"stage": stage, "updated_at": time.strftime("%Y-%m-%dT%H:%M:%S%z"), **extra} + tmp.write_text(json.dumps(payload, indent=2)) + tmp.replace(state_path()) + + +def read_state() -> dict: + try: + return json.loads(state_path().read_text()) + except (FileNotFoundError, 
json.JSONDecodeError): + return {} + + +def acquire_lock(): + path = lock_path() + path.parent.mkdir(parents=True, exist_ok=True) + fh = path.open("w") + try: + fcntl.flock(fh, fcntl.LOCK_EX | fcntl.LOCK_NB) + except BlockingIOError as e: + fh.close() + raise CatalogError("another catalog sync is already in progress") from e + return fh + + +def _validate_staging(staging: Path, expected_version: str) -> None: + """Fail hard if the staging tree isn't a well-formed catalog release.""" + version_file = staging / _VERSION_FILE + if not version_file.is_file(): + raise CatalogError("catalog tarball has no VERSION file at root") + actual = version_file.read_text().strip() + if actual != expected_version: + raise CatalogError( + f"catalog tarball VERSION ({actual!r}) doesn't match expected ({expected_version!r})" + ) + apps_root = staging / "apps" + if not apps_root.is_dir(): + raise CatalogError("catalog tarball has no apps/ directory") + for entry in sorted(apps_root.iterdir()): + if not entry.is_dir(): + continue + manifest_path = entry / "manifest.json" + if not manifest_path.exists(): + raise CatalogError(f"catalog app {entry.name!r} has no manifest.json") + try: + load_manifest(manifest_path, expected_name=entry.name) + except ManifestError as e: + raise CatalogError(f"catalog app {entry.name!r}: invalid manifest: {e}") from e + + +def _atomic_swap(staging: Path) -> None: + """Move staging → live catalog, keeping the previous tree as .prev until + the rename succeeds so we never leave a half-written catalog on disk.""" + live = catalog_dir() + live.parent.mkdir(parents=True, exist_ok=True) + prev = live.with_name(live.name + _PREV_SUFFIX) + if prev.exists(): + shutil.rmtree(prev) + if live.exists(): + live.rename(prev) + try: + staging.rename(live) + except OSError as e: + if prev.exists(): + # try to restore the previous tree; if that also fails the box + # has no catalog at all until the next sync — still better than + # a partially-extracted tree. 
+ try: + prev.rename(live) + except OSError: + pass + raise CatalogError(f"atomic catalog swap failed: {e}") from e + if prev.exists(): + shutil.rmtree(prev, ignore_errors=True) + + +def sync_catalog() -> CatalogCheck: + """End-to-end sync. Acquires the lock, writes state at each stage, and + leaves the live catalog untouched on any failure before the rename step. + """ + with acquire_lock(): + write_state("checking") + check = check_catalog() + if not check.update_available: + write_state("done", version=check.current or check.latest, note="already up to date") + return check + if not check.tarball_url or not check.sha256_url: + raise CatalogError("catalog release is missing tarball or sha256 asset") + + # Downloads land in a sibling of the live catalog so half-finished + # artefacts never pollute the live tree, and stay under /var/lib/ + # furtka/ so a sync interrupted by reboot can resume instead of + # starting over from /tmp (which clears). + dl_dir = catalog_dir().with_name(catalog_dir().name + _DOWNLOADS_NAME) + dl_dir.mkdir(parents=True, exist_ok=True) + tarball = dl_dir / f"furtka-apps-{check.latest}.tar.gz" + sha_file = dl_dir / f"furtka-apps-{check.latest}.tar.gz.sha256" + + write_state("downloading", latest=check.latest) + _rc.download(check.tarball_url, tarball, error_cls=CatalogError) + _rc.download(check.sha256_url, sha_file, error_cls=CatalogError) + + write_state("verifying", latest=check.latest) + expected = _rc.parse_sha256_sidecar(sha_file.read_text(), error_cls=CatalogError) + _rc.verify_tarball(tarball, expected, error_cls=CatalogError) + + write_state("extracting", latest=check.latest) + staging = catalog_dir().with_name(catalog_dir().name + _STAGING_NAME) + if staging.exists(): + shutil.rmtree(staging) + try: + _rc.extract_tarball(tarball, staging, error_cls=CatalogError) + _validate_staging(staging, check.latest) + except CatalogError: + shutil.rmtree(staging, ignore_errors=True) + raise + + write_state("swapping", latest=check.latest) + 
try: + _atomic_swap(staging) + except CatalogError: + shutil.rmtree(staging, ignore_errors=True) + raise + + write_state("done", version=check.latest, previous=check.current) + return check diff --git a/furtka/cli.py b/furtka/cli.py index 91dd41c..eb4452c 100644 --- a/furtka/cli.py +++ b/furtka/cli.py @@ -149,6 +149,60 @@ def _cmd_rollback(args: argparse.Namespace) -> int: return 0 +def _cmd_catalog_sync(args: argparse.Namespace) -> int: + from furtka import catalog + + if args.check: + try: + check = catalog.check_catalog() + except catalog.CatalogError as e: + print(f"error: {e}", file=sys.stderr) + return 2 + if args.json: + print( + json.dumps( + { + "current": check.current, + "latest": check.latest, + "update_available": check.update_available, + }, + indent=2, + ) + ) + elif check.update_available: + print(f"Catalog update available: {check.current or '(none)'} → {check.latest}") + else: + print(f"Catalog already up to date ({check.current or check.latest})") + return 0 + + try: + check = catalog.sync_catalog() + except catalog.CatalogError as e: + print(f"error: {e}", file=sys.stderr) + return 2 + if not check.update_available: + print(f"Catalog already up to date ({check.current or check.latest})") + else: + print(f"Synced catalog {check.current or '(none)'} → {check.latest}") + return 0 + + +def _cmd_catalog_status(args: argparse.Namespace) -> int: + from furtka import catalog + + current = catalog.read_current_catalog_version() + state = catalog.read_state() + if args.json: + print(json.dumps({"current": current, "state": state}, indent=2)) + return 0 + print(f"Catalog version: {current or '(none — run `furtka catalog sync`)'}") + if state: + print(f"Last sync stage: {state.get('stage', '?')} at {state.get('updated_at', '?')}") + else: + print("Last sync stage: (never)") + return 0 + + def build_parser() -> argparse.ArgumentParser: p = argparse.ArgumentParser(prog="furtka", description="Furtka resource manager") sub = p.add_subparsers(dest="command", 
required=True) @@ -212,6 +266,36 @@ def build_parser() -> argparse.ArgumentParser: ) rollback.set_defaults(func=_cmd_rollback) + catalog = sub.add_parser("catalog", help="Manage the apps catalog (daniel/furtka-apps)") + catalog_sub = catalog.add_subparsers(dest="subcommand", required=True) + + catalog_sync = catalog_sub.add_parser( + "sync", + help="Download and install the latest apps catalog from Forgejo", + ) + catalog_sync.add_argument( + "--check", + action="store_true", + help="Only check whether a catalog update is available; don't apply", + ) + catalog_sync.add_argument( + "--json", + action="store_true", + help="Emit machine-readable JSON (only honoured with --check)", + ) + catalog_sync.set_defaults(func=_cmd_catalog_sync) + + catalog_status = catalog_sub.add_parser( + "status", + help="Print the currently-installed catalog version and last-sync stage", + ) + catalog_status.add_argument( + "--json", + action="store_true", + help="Emit machine-readable JSON", + ) + catalog_status.set_defaults(func=_cmd_catalog_status) + return p diff --git a/furtka/installer.py b/furtka/installer.py index 87397cf..95b1157 100644 --- a/furtka/installer.py +++ b/furtka/installer.py @@ -1,8 +1,9 @@ import shutil from pathlib import Path +from furtka import sources from furtka.manifest import ManifestError, load_manifest -from furtka.paths import apps_dir, bundled_apps_dir +from furtka.paths import apps_dir # Values that an app's .env.example may use as obvious "fill me in" markers. # If any of these reach the live .env, install refuses — otherwise we'd ship @@ -58,17 +59,18 @@ def resolve_source(source: str) -> Path: """Resolve a `furtka app install ` arg to a real source folder. If `source` looks like a path (or exists on disk), use it. Otherwise treat - it as a bundled app name and look up under /opt/furtka/apps/. + it as an app name and look it up via `furtka.sources.resolve_app_name` — + which checks the synced catalog first and falls back to the bundled seed. 
""" p = Path(source) if p.is_dir(): return p if "/" in source or source.startswith("."): raise InstallError(f"{source!r} is not a directory") - bundled = bundled_apps_dir() / source - if bundled.is_dir(): - return bundled - raise InstallError(f"{source!r} not found as a path or bundled app") + resolved = sources.resolve_app_name(source) + if resolved is None: + raise InstallError(f"{source!r} not found as a path, catalog app, or bundled app") + return resolved.path def install_from(src: Path, settings: dict[str, str] | None = None) -> Path: diff --git a/furtka/paths.py b/furtka/paths.py index 9b6166d..db741c3 100644 --- a/furtka/paths.py +++ b/furtka/paths.py @@ -7,6 +7,10 @@ DEFAULT_APPS_DIR = Path("/var/lib/furtka/apps") # symlink. A flat /opt/furtka/apps path would break the Phase-2 self-update # flow (symlink swap wouldn't move the bundled-app tree along with the code). DEFAULT_BUNDLED_APPS_DIR = Path("/opt/furtka/current/apps") +# Catalog apps come from `furtka catalog sync` pulling the daniel/furtka-apps +# release tarball. Lives under /var/lib/furtka/ so it survives core self- +# updates — the resolver (furtka.sources) prefers it over the bundled seed. +DEFAULT_CATALOG_DIR = Path("/var/lib/furtka/catalog") def apps_dir() -> Path: @@ -15,3 +19,11 @@ def apps_dir() -> Path: def bundled_apps_dir() -> Path: return Path(os.environ.get("FURTKA_BUNDLED_APPS_DIR", DEFAULT_BUNDLED_APPS_DIR)) + + +def catalog_dir() -> Path: + return Path(os.environ.get("FURTKA_CATALOG_DIR", DEFAULT_CATALOG_DIR)) + + +def catalog_apps_dir() -> Path: + return catalog_dir() / "apps" diff --git a/furtka/sources.py b/furtka/sources.py new file mode 100644 index 0000000..77a1f4a --- /dev/null +++ b/furtka/sources.py @@ -0,0 +1,75 @@ +"""Single lookup layer for "where does app live right now?". + +Three origins an app folder can come from: + +- ``catalog`` — the daily-synced ``/var/lib/furtka/catalog/apps/`` tree + that ``furtka.catalog.sync_catalog`` maintains. 
+- ``bundled`` — the seed ``/opt/furtka/current/apps/`` tree shipped + inside the core release tarball. Used for first-boot before any + catalog sync has run, and as the fallback when the catalog is stale, + missing, or doesn't know about this app. +- ``local`` — an explicit directory path passed to ``furtka app install + /path/to/src``; bypasses this module entirely. + +Catalog wins on collision. The precedence is deliberate — when the user +presses "Sync apps catalog" they want what they synced, not whatever the +core tarball happened to carry. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from furtka.paths import bundled_apps_dir, catalog_apps_dir + + +@dataclass(frozen=True) +class AppSource: + path: Path + origin: str # "catalog" | "bundled" | "local" + + +def resolve_app_name(name: str) -> AppSource | None: + """Return the source folder for a bundled/catalog app name. + + Checks catalog first, then bundled seed. Presence is tested by + ``manifest.json`` existing — an empty folder or a stray ``.env`` + won't register. Returns ``None`` if the name isn't known anywhere. + """ + cat = catalog_apps_dir() / name + if (cat / "manifest.json").is_file(): + return AppSource(cat, "catalog") + bundled = bundled_apps_dir() / name + if (bundled / "manifest.json").is_file(): + return AppSource(bundled, "bundled") + return None + + +def list_available() -> list[AppSource]: + """Catalog ∪ bundled, catalog wins on name collision. + + Each entry is a folder containing a manifest.json. Ordering is + alphabetical by folder name, which matches how the scanner sorts so + the UI list stays stable across sync/reboot.
+ """ + seen: dict[str, AppSource] = {} + cat_root = catalog_apps_dir() + if cat_root.is_dir(): + for entry in sorted(cat_root.iterdir()): + if not entry.is_dir(): + continue + if not (entry / "manifest.json").is_file(): + continue + seen[entry.name] = AppSource(entry, "catalog") + bundled_root = bundled_apps_dir() + if bundled_root.is_dir(): + for entry in sorted(bundled_root.iterdir()): + if not entry.is_dir(): + continue + if entry.name in seen: + continue + if not (entry / "manifest.json").is_file(): + continue + seen[entry.name] = AppSource(entry, "bundled") + return [seen[name] for name in sorted(seen)] diff --git a/furtka/updater.py b/furtka/updater.py index e36e6e7..88e2088 100644 --- a/furtka/updater.py +++ b/furtka/updater.py @@ -29,18 +29,18 @@ the updater at a tmpdir. from __future__ import annotations import fcntl -import hashlib import json import os import shutil import subprocess -import tarfile import time import urllib.error import urllib.request from dataclasses import dataclass from pathlib import Path +from furtka import _release_common as _rc + FORGEJO_HOST = os.environ.get("FURTKA_FORGEJO_HOST", "forgejo.sourcegate.online") FORGEJO_REPO = os.environ.get("FURTKA_FORGEJO_REPO", "daniel/furtka") _FURTKA_ROOT = Path(os.environ.get("FURTKA_ROOT", "/opt/furtka")) @@ -95,37 +95,11 @@ def read_current_version() -> str: return "dev" -def _forgejo_api(path: str) -> dict: - url = f"https://{FORGEJO_HOST}/api/v1/repos/{FORGEJO_REPO}{path}" - req = urllib.request.Request(url, headers={"Accept": "application/json"}) - try: - with urllib.request.urlopen(req, timeout=15) as resp: - return json.loads(resp.read()) - except (urllib.error.URLError, json.JSONDecodeError) as e: - raise UpdateError(f"forgejo api {url}: {e}") from e +def _forgejo_api(path: str) -> dict | list: + return _rc.forgejo_api(FORGEJO_HOST, FORGEJO_REPO, path, error_cls=UpdateError) -def _version_tuple(v: str) -> tuple: - """Compare CalVer tags like 26.1-alpha < 26.1-beta < 26.1 < 
26.2-alpha. - - The "stable" release (no suffix) sorts after its own pre-releases. Uses a - tuple of (year, release, stage-rank, stage-tag). Stage rank: alpha=0, - beta=1, rc=2, stable=3, unknown=-1. - """ - stage_rank = {"alpha": 0, "beta": 1, "rc": 2} - head, _, suffix = v.partition("-") - try: - year_str, release_str = head.split(".", 1) - year = int(year_str) - release = int(release_str) - except (ValueError, IndexError): - return (-1, -1, -1, v) - if not suffix: - return (year, release, 3, "") - for name, rank in stage_rank.items(): - if suffix.startswith(name): - return (year, release, rank, suffix) - return (year, release, -1, suffix) +_version_tuple = _rc.version_tuple def check_update() -> UpdateCheck: @@ -165,57 +139,22 @@ def check_update() -> UpdateCheck: def _download(url: str, dest: Path) -> None: - dest.parent.mkdir(parents=True, exist_ok=True) - req = urllib.request.Request(url) - try: - with urllib.request.urlopen(req, timeout=60) as resp, dest.open("wb") as f: - shutil.copyfileobj(resp, f) - except urllib.error.URLError as e: - raise UpdateError(f"download {url}: {e}") from e + _rc.download(url, dest, error_cls=UpdateError) -def _sha256_of(path: Path) -> str: - h = hashlib.sha256() - with path.open("rb") as f: - for chunk in iter(lambda: f.read(1024 * 1024), b""): - h.update(chunk) - return h.hexdigest() +_sha256_of = _rc.sha256_of def verify_tarball(tarball: Path, expected_sha: str) -> None: - actual = _sha256_of(tarball) - if actual != expected_sha: - raise UpdateError(f"sha256 mismatch: expected {expected_sha}, got {actual}") + _rc.verify_tarball(tarball, expected_sha, error_cls=UpdateError) def _parse_sha256_sidecar(text: str) -> str: - """Extract the hash from a standard `sha256sum` sidecar line.""" - line = text.strip().split("\n", 1)[0].strip() - if not line: - raise UpdateError("empty sha256 sidecar") - return line.split()[0] + return _rc.parse_sha256_sidecar(text, error_cls=UpdateError) def _extract_tarball(tarball: Path, dest: Path) -> 
str: - """Extract the tarball and return the VERSION read from its root.""" - dest.mkdir(parents=True, exist_ok=True) - with tarfile.open(tarball, "r:gz") as tf: - # defensive: refuse entries that would escape dest - for member in tf.getmembers(): - if member.name.startswith(("/", "..")) or ".." in Path(member.name).parts: - raise UpdateError(f"refusing tarball entry {member.name!r}") - # Python 3.12+ grew a stricter default filter; opt into it where - # available to catch symlink-escape / device-node / setuid tricks - # that our regex check can't see. Older Pythons fall back to the - # historical permissive behaviour. - try: - tf.extractall(dest, filter="data") - except TypeError: - tf.extractall(dest) - version_file = dest / "VERSION" - if not version_file.is_file(): - raise UpdateError("tarball has no VERSION file at root") - return version_file.read_text().strip() + return _rc.extract_tarball(tarball, dest, error_cls=UpdateError) def _current_hostname() -> str: @@ -255,7 +194,15 @@ def _refresh_caddyfile(source: Path) -> bool: def _link_new_units(unit_dir: Path) -> list[str]: """`systemctl link` any unit file in unit_dir that isn't already symlinked - into /etc/systemd/system/. Returns the list of newly-linked unit names.""" + into /etc/systemd/system/. Returns the list of newly-linked unit names. + + Newly-linked `.timer` units are additionally `systemctl enable`d so that + a self-update introducing a timer (e.g. 26.5 → 26.6 adding + furtka-catalog-sync.timer) activates it automatically — the installer's + enable list only applies to fresh installs. A linked-but-disabled timer + never fires on its own, so without this step catalog sync would never + happen on upgraded boxes. 
+ """ if not unit_dir.is_dir(): return [] linked = [] @@ -266,6 +213,8 @@ def _link_new_units(unit_dir: Path) -> list[str]: if target.exists() or target.is_symlink(): continue _run(["systemctl", "link", str(unit_file)]) + if unit_file.suffix == ".timer": + _run(["systemctl", "enable", unit_file.name]) linked.append(unit_file.name) return linked diff --git a/tests/test_api.py b/tests/test_api.py index c3dcd7f..8d9e805 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,10 +22,12 @@ VALID_MANIFEST = { def fake_dirs(tmp_path, monkeypatch): apps = tmp_path / "apps" bundled = tmp_path / "bundled" + catalog = tmp_path / "catalog" apps.mkdir() bundled.mkdir() monkeypatch.setenv("FURTKA_APPS_DIR", str(apps)) monkeypatch.setenv("FURTKA_BUNDLED_APPS_DIR", str(bundled)) + monkeypatch.setenv("FURTKA_CATALOG_DIR", str(catalog)) return apps, bundled @@ -51,17 +53,19 @@ def test_list_installed_empty(fake_dirs): assert api._list_installed() == [] -def test_list_bundled_empty(fake_dirs): - assert api._list_bundled() == [] +def test_list_available_empty(fake_dirs): + assert api._list_available() == [] -def test_list_bundled_shows_uninstalled(fake_dirs): +def test_list_available_shows_uninstalled(fake_dirs): _, bundled = fake_dirs _write_bundled(bundled, "fileshare") - out = api._list_bundled() + out = api._list_available() assert len(out) == 1 assert out[0]["name"] == "fileshare" assert "display_name" in out[0] + # Source field lets the UI later distinguish catalog from bundled seed. 
+ assert out[0]["source"] == "bundled" # --- Icon inlining ---------------------------------------------------------- @@ -119,11 +123,11 @@ def test_read_icon_svg_rejects_javascript_url(tmp_path): assert api._read_icon_svg(tmp_path, "icon.svg") is None -def test_list_bundled_inlines_icon_svg(fake_dirs): +def test_list_available_inlines_icon_svg(fake_dirs): _, bundled = fake_dirs app = _write_bundled(bundled, "fileshare") _write_icon(app, _SIMPLE_SVG) - [entry] = api._list_bundled() + [entry] = api._list_available() assert entry["icon_svg"] == _SIMPLE_SVG @@ -136,18 +140,35 @@ def test_list_installed_inlines_icon_svg(fake_dirs, no_docker): assert entry["icon_svg"] == _SIMPLE_SVG -def test_list_bundled_hides_already_installed(fake_dirs, no_docker): +def test_list_available_hides_already_installed(fake_dirs, no_docker): apps, bundled = fake_dirs _write_bundled(bundled, "fileshare", env_example="A=real") status, _ = api._do_install("fileshare") assert status == 200 # Now bundled should NOT include fileshare anymore. - assert api._list_bundled() == [] + assert api._list_available() == [] # But installed list should. installed = api._list_installed() assert len(installed) == 1 and installed[0]["name"] == "fileshare" +def test_list_available_prefers_catalog_over_bundled(fake_dirs): + _, bundled = fake_dirs + catalog_root = bundled.parent / "catalog" / "apps" + catalog_root.mkdir(parents=True) + _write_bundled(bundled, "fileshare") + # A fileshare in the catalog as well — manifest version 0.2.0 to tell apart. 
+ catalog_manifest = dict(VALID_MANIFEST, version="0.2.0") + cat_app = catalog_root / "fileshare" + cat_app.mkdir() + (cat_app / "manifest.json").write_text(json.dumps(catalog_manifest)) + + out = api._list_available() + assert len(out) == 1 + assert out[0]["source"] == "catalog" + assert out[0]["version"] == "0.2.0" + + def test_install_endpoint_rejects_placeholder(fake_dirs): _, bundled = fake_dirs _write_bundled(bundled, "fileshare", env_example="SMB_PASSWORD=changeme") @@ -581,3 +602,40 @@ def test_http_post_install_with_settings(fake_dirs, no_docker): finally: server.shutdown() server.server_close() + + +# --- Catalog endpoints ------------------------------------------------------ + + +def test_catalog_status_reports_absent_catalog(fake_dirs, monkeypatch): + """With no /var/lib/furtka/catalog/ on disk, status reports current=None + empty state.""" + # FURTKA_CATALOG_STATE is not touched by fake_dirs — point it at tmp so we + # don't hit the production path. + monkeypatch.setenv("FURTKA_CATALOG_STATE", str(fake_dirs[0].parent / "catalog-state.json")) + import importlib + + from furtka import catalog as c + + importlib.reload(c) + status, body = api._do_catalog_status() + assert status == 200 + assert body["current"] is None + assert body["state"] == {} + + +def test_catalog_check_surfaces_forgejo_error(fake_dirs, monkeypatch): + monkeypatch.setenv("FURTKA_CATALOG_STATE", str(fake_dirs[0].parent / "catalog-state.json")) + import importlib + + from furtka import _release_common as _rc + from furtka import catalog as c + + importlib.reload(c) + + def boom(host, repo, path, *, error_cls=RuntimeError): + raise error_cls("forgejo api down") + + monkeypatch.setattr(_rc, "forgejo_api", boom) + status, body = api._do_catalog_check() + assert status == 502 + assert "forgejo api down" in body["error"] diff --git a/tests/test_catalog.py b/tests/test_catalog.py new file mode 100644 index 0000000..1eccd2f --- /dev/null +++ b/tests/test_catalog.py @@ -0,0 +1,333 @@ +"""Tests 
for the apps-catalog sync flow. + +Same shape as ``tests/test_updater.py``: fixture reloads the module with +env-overridden paths, fake tarballs land in tmp_path, Forgejo API is +stubbed via ``urllib.request.urlopen`` monkeypatching so nothing talks +to the network. + +Asserts end-to-end atomicity: on any failure path — bad sha256, broken +tarball, invalid manifest — the live catalog dir is either left +untouched (if one existed) or absent (if it didn't). +""" + +from __future__ import annotations + +import io +import json +import tarfile +from pathlib import Path + +import pytest + + +@pytest.fixture +def catalog(tmp_path, monkeypatch): + monkeypatch.setenv("FURTKA_CATALOG_DIR", str(tmp_path / "var_lib_furtka_catalog")) + monkeypatch.setenv("FURTKA_CATALOG_STATE", str(tmp_path / "var_lib_furtka_catalog-state.json")) + monkeypatch.setenv("FURTKA_CATALOG_LOCK", str(tmp_path / "catalog.lock")) + monkeypatch.setenv("FURTKA_FORGEJO_HOST", "forgejo.test.local") + monkeypatch.setenv("FURTKA_CATALOG_REPO", "daniel/furtka-apps") + + import importlib + + from furtka import catalog as c + from furtka import paths as p + + importlib.reload(p) + importlib.reload(c) + return c + + +def _manifest(name: str = "fileshare") -> dict: + return { + "name": name, + "display_name": "Fileshare", + "version": "0.1.0", + "description": "Test fixture app", + "volumes": ["files"], + "ports": [445], + "icon": "icon.svg", + } + + +def _make_catalog_tarball( + path: Path, + version: str, + *, + apps: list[tuple[str, dict]] | None = None, + extra_entries: list[tuple[str, bytes]] | None = None, +) -> None: + """Build a minimal valid catalog tarball. + + `apps` is a list of (folder_name, manifest_dict). Each app folder gets + a `manifest.json` + a stub `docker-compose.yaml` + `icon.svg`. + `extra_entries` lets tests inject malformed content (path-traversal, + missing VERSION, ...) without rebuilding the helper. 
+ """ + apps = apps if apps is not None else [("fileshare", _manifest())] + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode="w:gz") as tf: + entries: list[tuple[str, bytes]] = [("VERSION", f"{version}\n".encode())] + for folder, m in apps: + entries.append((f"apps/{folder}/manifest.json", json.dumps(m).encode())) + entries.append( + (f"apps/{folder}/docker-compose.yaml", b"services:\n app:\n image: scratch\n") + ) + entries.append((f"apps/{folder}/icon.svg", b"")) + if extra_entries: + entries.extend(extra_entries) + for name, data in entries: + info = tarfile.TarInfo(name=name) + info.size = len(data) + tf.addfile(info, io.BytesIO(data)) + path.write_bytes(buf.getvalue()) + + +def _stub_forgejo_release( + monkeypatch, + catalog, + *, + tag: str, + tarball_url: str = "https://forgejo.test.local/t.tar.gz", + sha_url: str = "https://forgejo.test.local/t.tar.gz.sha256", + releases: list | None = None, +): + """Patch ``_rc.forgejo_api`` so check_catalog sees a canned release list.""" + if releases is None: + releases = [ + { + "tag_name": tag, + "assets": [ + {"name": f"furtka-apps-{tag}.tar.gz", "browser_download_url": tarball_url}, + { + "name": f"furtka-apps-{tag}.tar.gz.sha256", + "browser_download_url": sha_url, + }, + ], + } + ] + + def fake_api(host, repo, path, *, error_cls=RuntimeError): + return releases + + from furtka import _release_common as _rc + + monkeypatch.setattr(_rc, "forgejo_api", fake_api) + + +def _stub_download(monkeypatch, catalog, mapping: dict[str, bytes]): + """Patch ``_rc.download`` so sync_catalog pulls from an in-memory map.""" + from furtka import _release_common as _rc + + def fake_download(url, dest, *, error_cls=RuntimeError): + if url not in mapping: + raise error_cls(f"test: no fake content for {url}") + dest.parent.mkdir(parents=True, exist_ok=True) + dest.write_bytes(mapping[url]) + + monkeypatch.setattr(_rc, "download", fake_download) + + +# --------------------------------------------------------------------------- # 
+# check_catalog +# --------------------------------------------------------------------------- # + + +def test_check_catalog_reports_update_when_versions_differ(catalog, monkeypatch, tmp_path): + # Pretend we already have catalog version 26.5 on disk; Forgejo reports 26.6. + catalog.catalog_dir().mkdir(parents=True) + (catalog.catalog_dir() / "VERSION").write_text("26.5\n") + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + + check = catalog.check_catalog() + assert check.current == "26.5" + assert check.latest == "26.6" + assert check.update_available is True + assert check.tarball_url.endswith(".tar.gz") + assert check.sha256_url.endswith(".sha256") + + +def test_check_catalog_reports_up_to_date_when_same_version(catalog, monkeypatch): + catalog.catalog_dir().mkdir(parents=True) + (catalog.catalog_dir() / "VERSION").write_text("26.5\n") + _stub_forgejo_release(monkeypatch, catalog, tag="26.5") + + check = catalog.check_catalog() + assert check.current == "26.5" + assert check.latest == "26.5" + assert check.update_available is False + + +def test_check_catalog_treats_missing_current_as_installable(catalog, monkeypatch): + # Fresh box, no catalog ever synced — any release is an update. 
+ _stub_forgejo_release(monkeypatch, catalog, tag="26.5") + + check = catalog.check_catalog() + assert check.current is None + assert check.update_available is True + + +def test_check_catalog_raises_when_no_releases_published(catalog, monkeypatch): + _stub_forgejo_release(monkeypatch, catalog, tag="x", releases=[]) + with pytest.raises(catalog.CatalogError, match="no catalog releases"): + catalog.check_catalog() + + +# --------------------------------------------------------------------------- # +# sync_catalog — happy + error paths +# --------------------------------------------------------------------------- # + + +def test_sync_catalog_happy_path(catalog, monkeypatch, tmp_path): + import hashlib + + tarball_path = tmp_path / "tarball.tar.gz" + _make_catalog_tarball(tarball_path, "26.6") + tarball_bytes = tarball_path.read_bytes() + sha = hashlib.sha256(tarball_bytes).hexdigest() + + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + _stub_download( + monkeypatch, + catalog, + { + "https://forgejo.test.local/t.tar.gz": tarball_bytes, + "https://forgejo.test.local/t.tar.gz.sha256": ( + f"{sha} furtka-apps-26.6.tar.gz\n".encode() + ), + }, + ) + + check = catalog.sync_catalog() + assert check.latest == "26.6" + assert (catalog.catalog_dir() / "VERSION").read_text().strip() == "26.6" + assert (catalog.catalog_dir() / "apps" / "fileshare" / "manifest.json").is_file() + state = catalog.read_state() + assert state["stage"] == "done" + assert state["version"] == "26.6" + + +def test_sync_catalog_noop_when_already_current(catalog, monkeypatch, tmp_path): + catalog.catalog_dir().mkdir(parents=True) + (catalog.catalog_dir() / "VERSION").write_text("26.5\n") + _stub_forgejo_release(monkeypatch, catalog, tag="26.5") + + check = catalog.sync_catalog() + assert check.update_available is False + assert catalog.read_state()["stage"] == "done" + + +def test_sync_catalog_refuses_sha256_mismatch(catalog, monkeypatch, tmp_path): + tarball_path = tmp_path / "tarball.tar.gz" + 
_make_catalog_tarball(tarball_path, "26.6") + + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + _stub_download( + monkeypatch, + catalog, + { + "https://forgejo.test.local/t.tar.gz": tarball_path.read_bytes(), + # Hash for some OTHER content — will mismatch. + "https://forgejo.test.local/t.tar.gz.sha256": (b"0" * 64 + b" wrong.tar.gz\n"), + }, + ) + + with pytest.raises(catalog.CatalogError, match="sha256 mismatch"): + catalog.sync_catalog() + # Live catalog never existed, must still not exist after the failed sync. + assert not catalog.catalog_dir().exists() + + +def test_sync_catalog_refuses_tarball_with_invalid_manifest(catalog, monkeypatch, tmp_path): + import hashlib + + bad_manifest = {"name": "broken"} # missing required fields + + tarball_path = tmp_path / "tarball.tar.gz" + _make_catalog_tarball(tarball_path, "26.6", apps=[("broken", bad_manifest)]) + tarball_bytes = tarball_path.read_bytes() + sha = hashlib.sha256(tarball_bytes).hexdigest() + + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + _stub_download( + monkeypatch, + catalog, + { + "https://forgejo.test.local/t.tar.gz": tarball_bytes, + "https://forgejo.test.local/t.tar.gz.sha256": ( + f"{sha} furtka-apps-26.6.tar.gz\n".encode() + ), + }, + ) + + with pytest.raises(catalog.CatalogError, match="invalid manifest"): + catalog.sync_catalog() + # Staging was cleaned; live catalog never materialised. + assert not catalog.catalog_dir().exists() + + +def test_sync_catalog_preserves_existing_catalog_on_failure(catalog, monkeypatch, tmp_path): + """A failed sync must leave the previous live catalog intact so boxes + keep working until the next successful sync.""" + import hashlib + + # Seed a live catalog that represents a previous successful sync. 
+ live = catalog.catalog_dir() + live.mkdir(parents=True) + (live / "VERSION").write_text("26.5\n") + (live / "apps").mkdir() + + bad_manifest = {"name": "broken"} # invalid + tarball_path = tmp_path / "tarball.tar.gz" + _make_catalog_tarball(tarball_path, "26.6", apps=[("broken", bad_manifest)]) + sha = hashlib.sha256(tarball_path.read_bytes()).hexdigest() + + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + _stub_download( + monkeypatch, + catalog, + { + "https://forgejo.test.local/t.tar.gz": tarball_path.read_bytes(), + "https://forgejo.test.local/t.tar.gz.sha256": f"{sha} x\n".encode(), + }, + ) + + with pytest.raises(catalog.CatalogError): + catalog.sync_catalog() + # The 26.5 live catalog survives the failed 26.6 sync. + assert (live / "VERSION").read_text().strip() == "26.5" + + +def test_sync_catalog_lock_contention(catalog, monkeypatch): + _stub_forgejo_release(monkeypatch, catalog, tag="26.6") + + # Hold the lock from outside; the real sync_catalog call must refuse. + first = catalog.acquire_lock() + try: + with pytest.raises(catalog.CatalogError, match="already in progress"): + catalog.sync_catalog() + finally: + first.close() + + +# --------------------------------------------------------------------------- # +# state + current-version helpers +# --------------------------------------------------------------------------- # + + +def test_read_current_catalog_version_absent(catalog): + assert catalog.read_current_catalog_version() is None + + +def test_read_current_catalog_version_empty_file(catalog): + catalog.catalog_dir().mkdir(parents=True) + (catalog.catalog_dir() / "VERSION").write_text("\n") + assert catalog.read_current_catalog_version() is None + + +def test_write_and_read_state_round_trip(catalog): + catalog.write_state("downloading", latest="26.6") + s = catalog.read_state() + assert s["stage"] == "downloading" + assert s["latest"] == "26.6" + assert "updated_at" in s diff --git a/tests/test_sources.py b/tests/test_sources.py new file 
mode 100644 index 0000000..cab3265 --- /dev/null +++ b/tests/test_sources.py @@ -0,0 +1,108 @@ +"""Tests for the catalog > bundled resolver.""" + +from __future__ import annotations + +import json +from pathlib import Path + +import pytest + + +def _manifest(name: str = "fileshare") -> dict: + return { + "name": name, + "display_name": "Fileshare", + "version": "0.1.0", + "description": "x", + "volumes": [], + "ports": [], + "icon": "icon.svg", + } + + +@pytest.fixture +def sources_mod(tmp_path, monkeypatch): + monkeypatch.setenv("FURTKA_CATALOG_DIR", str(tmp_path / "catalog")) + monkeypatch.setenv("FURTKA_BUNDLED_APPS_DIR", str(tmp_path / "bundled")) + + import importlib + + from furtka import paths as p + from furtka import sources as s + + importlib.reload(p) + importlib.reload(s) + return s + + +def _seed_app(root: Path, name: str, manifest: dict | None = None) -> Path: + folder = root / name + folder.mkdir(parents=True) + (folder / "manifest.json").write_text(json.dumps(manifest or _manifest(name))) + return folder + + +def test_resolve_app_name_returns_none_when_absent(sources_mod): + assert sources_mod.resolve_app_name("nope") is None + + +def test_resolve_app_name_prefers_catalog_over_bundled(sources_mod, tmp_path): + _seed_app(tmp_path / "catalog" / "apps", "fileshare") + _seed_app(tmp_path / "bundled", "fileshare") + + result = sources_mod.resolve_app_name("fileshare") + assert result is not None + assert result.origin == "catalog" + assert result.path.parent.name == "apps" + assert result.path.parent.parent.name == "catalog" + + +def test_resolve_app_name_falls_back_to_bundled(sources_mod, tmp_path): + _seed_app(tmp_path / "bundled", "fileshare") + + result = sources_mod.resolve_app_name("fileshare") + assert result is not None + assert result.origin == "bundled" + + +def test_resolve_app_name_ignores_folder_without_manifest(sources_mod, tmp_path): + # Empty folder is not a valid app even if the name matches. 
+ (tmp_path / "catalog" / "apps" / "fileshare").mkdir(parents=True) + _seed_app(tmp_path / "bundled", "fileshare") + + result = sources_mod.resolve_app_name("fileshare") + # Catalog entry without manifest is skipped; bundled wins. + assert result.origin == "bundled" + + +def test_list_available_unions_catalog_and_bundled(sources_mod, tmp_path): + _seed_app(tmp_path / "catalog" / "apps", "fileshare") + _seed_app(tmp_path / "bundled", "otherapp") + + names = {s.path.name: s.origin for s in sources_mod.list_available()} + assert names == {"fileshare": "catalog", "otherapp": "bundled"} + + +def test_list_available_catalog_wins_on_collision(sources_mod, tmp_path): + _seed_app(tmp_path / "catalog" / "apps", "fileshare") + _seed_app(tmp_path / "bundled", "fileshare") + + entries = sources_mod.list_available() + assert len(entries) == 1 + assert entries[0].origin == "catalog" + + +def test_list_available_empty_when_neither_exists(sources_mod): + assert sources_mod.list_available() == [] + + +def test_list_available_skips_non_dirs_and_no_manifest(sources_mod, tmp_path): + # A plain file in catalog/apps and an empty dir in bundled — both ignored. + cat_root = tmp_path / "catalog" / "apps" + cat_root.mkdir(parents=True) + (cat_root / "not-a-dir.txt").write_text("x") + (tmp_path / "bundled" / "emptyapp").mkdir(parents=True) + _seed_app(tmp_path / "bundled", "realapp") + + entries = sources_mod.list_available() + assert [e.path.name for e in entries] == ["realapp"] diff --git a/tests/test_updater.py b/tests/test_updater.py index 5813258..55ef155 100644 --- a/tests/test_updater.py +++ b/tests/test_updater.py @@ -246,17 +246,25 @@ def test_link_new_units_only_links_missing(updater, tmp_path, monkeypatch): linked = updater._link_new_units(unit_dir) assert linked == ["furtka-bar.timer"] - # Only one systemctl link call — for the new timer, not the existing service. - assert len(seen) == 1 + # Two calls for the newly-linked timer: systemctl link + systemctl enable. 
+ # The already-linked service is untouched. Timers need the follow-up + # `enable` so self-updates that introduce new timers don't leave them + # dormant — fresh installs get their enable via the webinstaller. + assert len(seen) == 2 assert seen[0][:2] == ["systemctl", "link"] assert seen[0][2].endswith("furtka-bar.timer") + assert seen[1] == ["systemctl", "enable", "furtka-bar.timer"] def test_extract_tarball_uses_data_filter_when_available(tmp_path, updater, monkeypatch): # Confirm we pass filter='data' to extractall on Python 3.12+; fall back - # cleanly on older runtimes. Capture the kwarg via a stub. + # cleanly on older runtimes. Capture the kwarg via a stub. tarfile lives + # in furtka._release_common after the extraction refactor, so we patch + # that module — updater._extract_tarball delegates there. + from furtka import _release_common as _rc + calls = [] - real_open = updater.tarfile.open # capture before monkeypatching + real_open = _rc.tarfile.open # capture before monkeypatching class _Recorder: def __init__(self, tarball): @@ -281,7 +289,7 @@ def test_extract_tarball_uses_data_filter_when_available(tmp_path, updater, monk tar = tmp_path / "t.tar.gz" _make_release_tarball(tar, "26.9-alpha") - monkeypatch.setattr(updater.tarfile, "open", lambda *a, **kw: _Recorder(tar)) + monkeypatch.setattr(_rc.tarfile, "open", lambda *a, **kw: _Recorder(tar)) dest = tmp_path / "dest" updater._extract_tarball(tar, dest) diff --git a/webinstaller/app.py b/webinstaller/app.py index 0392192..96adf01 100644 --- a/webinstaller/app.py +++ b/webinstaller/app.py @@ -263,6 +263,10 @@ _FURTKA_UNITS = ( "furtka-status.service", "furtka-status.timer", "furtka-welcome.service", + # Daily apps-catalog pull. Timer drives the service; the .service itself + # is oneshot and also callable ad-hoc via `furtka catalog sync`. + "furtka-catalog-sync.service", + "furtka-catalog-sync.timer", )