#!/usr/bin/env python3
"""
Sync each world's pack pin files to match the addon manifests on disk,
and flag BP→RP manifest dependency drift.

Two silent failure modes that have bitten this project:

1. BP→RP dep mismatch: BP's manifest dependencies[].version doesn't match
   the actual RP header version. BDS still loads both, but the texture
   pipeline disconnects — items appear in inventory with no skin.

2. World pin staleness: world_resource_packs.json / world_behavior_packs.json
   pin {uuid, version}. When an RP version is bumped, the pin must move with
   it or BDS skips loading the pack entirely.

Run before any deploy that touches a manifest:

    python3 scripts/sync-world-pins.py --audit-only      # dep audit, no SSH
    python3 scripts/sync-world-pins.py                   # dry-run pin diff
    python3 scripts/sync-world-pins.py --apply           # write pins to server
    python3 scripts/sync-world-pins.py --apply --restart # also restart affected
"""
from __future__ import annotations

import argparse
import json
import os
import pathlib
import re
import shlex
import subprocess
import sys
from typing import Iterable

ROOT = pathlib.Path(__file__).resolve().parent.parent
COMPOSE = ROOT / "docker-compose.yml"

# Volume → world-dir resolution. Lobby's volume holds multiple worlds; we read
# server.properties' level-name to pick the active one.
SERVICES = ("lobby", "jamie", "lyla", "mya")
VOLUME_TPL = "/var/lib/docker/volumes/minecraft-multiworld_{svc}-data/_data"
# Matches compose bind-mount lines like "- ./my_addon_RP:...".
MOUNT_RE = re.compile(r"^\s*-\s+\./([\w\-/]+(_BP|_RP)):")
# NOTE(review): assumes service keys sit at exactly this indentation in
# docker-compose.yml — confirm against the actual compose file.
SERVICE_HEADER_RE = re.compile(r"^ (\w+):\s*$")


def parse_compose_mounts() -> dict[str, list[tuple[str, str]]]:
    """Return {service: [(kind, addon_path), ...]} for the four MC services.

    kind is "BP" or "RP" (from the directory-name suffix); addon_path is the
    repo-relative addon directory as bind-mounted in docker-compose.yml.
    """
    mounts: dict[str, list[tuple[str, str]]] = {s: [] for s in SERVICES}
    current: str | None = None
    with COMPOSE.open() as f:
        for line in f:
            m = SERVICE_HEADER_RE.match(line)
            if m:
                # New service section: attribute following mounts to it only
                # when it is one of ours, otherwise stop attributing.
                current = m.group(1) if m.group(1) in mounts else None
                continue
            if current is None:
                continue
            mm = MOUNT_RE.match(line)
            if mm:
                kind = "BP" if mm.group(2) == "_BP" else "RP"
                mounts[current].append((kind, mm.group(1)))
    return mounts


def read_manifest(addon_path: str) -> dict | None:
    """Load <repo>/<addon_path>/manifest.json, or None when it doesn't exist."""
    f = ROOT / addon_path / "manifest.json"
    if not f.exists():
        return None
    return json.loads(f.read_text())


def audit_bp_rp_deps(mounts: dict[str, list[tuple[str, str]]]) -> list[str]:
    """Walk every BP manifest, look up referenced RP UUIDs, flag drift.

    Returns human-readable issue strings; an empty list means no drift.
    """
    # RP header uuid → (version, name, path); BPs are checked in a second pass.
    rps: dict[str, tuple[list[int], str, str]] = {}
    bps: list[tuple[str, str, dict]] = []
    seen: set[str] = set()
    for items in mounts.values():
        for kind, path in items:
            if path in seen:  # the same addon may be mounted by several services
                continue
            seen.add(path)
            d = read_manifest(path)
            if not d:
                continue
            h = d.get("header", {})
            if kind == "RP":
                rps[h["uuid"]] = (h["version"], h.get("name", "?"), path)
            else:
                bps.append((h.get("name", "?"), path, d))
    issues: list[str] = []
    for name, path, d in bps:
        for dep in d.get("dependencies", []) or []:
            uuid = dep.get("uuid")
            # Only deps resolving to one of our RPs are checked; script-module
            # dependencies carry other UUIDs and fall through here.
            if uuid and uuid in rps:
                rp_ver, rp_name, _ = rps[uuid]
                if list(dep.get("version") or []) != list(rp_ver):
                    issues.append(
                        f" {name} ({path})\n"
                        f" deps on RP {uuid} v{dep['version']} but {rp_name} is at v{rp_ver}"
                    )
    return issues


def ssh_args(host: str, user: str, password: str) -> list[str]:
    """argv prefix for a non-interactive sshpass+ssh invocation.

    SECURITY NOTE(review): `sshpass -p` exposes the password in the local
    process list; acceptable on a trusted LAN, but prefer the SSHPASS env
    var or key-based auth if this ever leaves that environment.
    """
    return [
        "sshpass", "-p", password,
        "ssh",
        "-o", "StrictHostKeyChecking=no",
        "-o", "BatchMode=no",
        f"{user}@{host}",
    ]


def ssh_run(host: str, user: str, password: str, remote_cmd: str) -> str:
    """Run remote_cmd via SSH, return stdout. Raises on non-zero exit."""
    proc = subprocess.run(
        ssh_args(host, user, password) + [remote_cmd],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        raise RuntimeError(
            f"ssh failed ({proc.returncode}): {proc.stderr.strip() or proc.stdout.strip()}"
        )
    return proc.stdout


def sudo_cat(host: str, user: str, password: str, remote_path: str) -> str:
    """Return the contents of a root-owned remote file via `sudo -S cat`."""
    # shlex.quote, not Python repr: repr is not shell quoting and breaks on
    # passwords/paths containing single quotes or backslashes.
    cmd = (
        f"echo {shlex.quote(password)} | sudo -S "
        f"cat {shlex.quote(remote_path)} 2>/dev/null"
    )
    return ssh_run(host, user, password, cmd)


def sudo_write(host: str, user: str, password: str, remote_path: str, content: str) -> None:
    """Write content to a root-owned file via sudo + shell redirection."""
    # base64 round-trip avoids quoting horror for arbitrary JSON
    import base64

    b64 = base64.b64encode(content.encode("utf-8")).decode("ascii")
    # Quote the whole bash -c payload as one argument. The previous repr-based
    # version nested quotes inside a single-quoted string and only worked by
    # accidental quote concatenation (it broke on paths containing spaces).
    inner = f"echo {b64} | base64 -d > {shlex.quote(remote_path)}"
    cmd = f"echo {shlex.quote(password)} | sudo -S bash -c {shlex.quote(inner)}"
    ssh_run(host, user, password, cmd)


def resolve_world_dir(host: str, user: str, password: str, service: str) -> str | None:
    """Find the active world directory for a service via server.properties.

    Returns the absolute remote path, or None when it cannot be determined
    (host unreachable, properties unreadable, or ambiguous worlds dir).
    """
    vol = VOLUME_TPL.format(svc=service)
    try:
        props = sudo_cat(host, user, password, f"{vol}/server.properties")
    except Exception:
        # Best-effort: callers treat None as "skip this service".
        return None
    level = None
    for line in props.splitlines():
        if line.startswith("level-name="):
            level = line[len("level-name="):].strip()
            break
    if not level:
        # fall back to the only existing dir if unambiguous
        try:
            ls = ssh_run(
                host, user, password,
                f"echo {shlex.quote(password)} | sudo -S ls -1 {shlex.quote(vol + '/worlds')}",
            )
            dirs = [d for d in ls.splitlines() if d.strip()]
            if len(dirs) == 1:
                level = dirs[0]
        except Exception:
            return None
    if not level:
        return None
    return f"{vol}/worlds/{level}"


def diff_pins(target: list[dict], existing: list[dict]) -> dict:
    """Return added/realigned/kept lists for reporting.

    added:     target pins whose pack_id is absent from the existing file
    realigned: (existing, target) pairs sharing a pack_id but differing version
    kept:      existing pins we don't manage (left untouched on write)
    """
    target_by_id = {e["pack_id"]: e for e in target}
    existing_by_id = {e["pack_id"]: e for e in existing}
    added = [t for t in target if t["pack_id"] not in existing_by_id]
    realigned = [
        (existing_by_id[t["pack_id"]], t)
        for t in target
        if t["pack_id"] in existing_by_id
        and list(existing_by_id[t["pack_id"]]["version"]) != list(t["version"])
    ]
    kept = [e for e in existing if e["pack_id"] not in target_by_id]
    return {"added": added, "realigned": realigned, "kept": kept}


def merge_pins(target: list[dict], existing: list[dict]) -> list[dict]:
    """Unmanaged existing pins first, then every managed target pin."""
    target_ids = {e["pack_id"] for e in target}
    return [e for e in existing if e["pack_id"] not in target_ids] + target


def fmt_pin(p: dict, name: str | None = None) -> str:
    """One-line human rendering of a pin entry, optionally with a pack name."""
    suffix = f" {name}" if name else ""
    return f"{p['pack_id']} v{p['version']}{suffix}"


def _collect_targets(items: list[tuple[str, str]]) -> tuple[list[dict], list[dict]]:
    """On-disk manifest headers → desired (BP pins, RP pins) for one service."""
    target_bp: list[dict] = []
    target_rp: list[dict] = []
    for kind, path in items:
        d = read_manifest(path)
        if not d:
            continue
        h = d.get("header") or {}
        # Previously an unguarded d["header"][...] — a malformed manifest
        # crashed the whole run with a KeyError; skip it loudly instead.
        if "uuid" not in h or "version" not in h:
            print(f" !! {path}: manifest header missing uuid/version, skipped")
            continue
        entry = {"pack_id": h["uuid"], "version": h["version"]}
        (target_bp if kind == "BP" else target_rp).append(entry)
    return target_bp, target_rp


def main() -> int:
    ap = argparse.ArgumentParser(description=__doc__.strip().splitlines()[0])
    ap.add_argument("--apply", action="store_true", help="write changes to server")
    ap.add_argument("--restart", action="store_true",
                    help="after --apply, restart affected containers")
    ap.add_argument("--audit-only", action="store_true",
                    help="only run the local BP→RP dep audit")
    ap.add_argument("--world", choices=SERVICES, help="restrict to one service")
    ap.add_argument("--ssh-host", default="10.0.0.247")
    ap.add_argument("--ssh-user", default="sysadmin")
    ap.add_argument("--ssh-pass", default=os.environ.get("MC_SSH_PASS"),
                    help="SSH password (or set MC_SSH_PASS)")
    args = ap.parse_args()
    if args.restart and not args.apply:
        ap.error("--restart requires --apply")

    mounts = parse_compose_mounts()

    # ── BP→RP dep audit (always — it is local-only and cheap)
    issues = audit_bp_rp_deps(mounts)
    total = sum(len(v) for v in mounts.values())
    print(f"\nManifest audit: {total} mounts across {len(SERVICES)} services")
    if issues:
        print(f" ✗ {len(issues)} BP→RP dep mismatch(es):")
        for line in issues:
            print(line)
        print(
            "\nFix each BP's manifest.json dependencies[].version to match the "
            "actual RP header version, then re-run."
        )
    else:
        print(" ✓ No BP→RP dep mismatches")
    if args.audit_only:
        # Exit status mirrors the audit so CI can gate on it.
        return 1 if issues else 0

    if not args.ssh_pass:
        ap.error("SSH password required — pass --ssh-pass or set MC_SSH_PASS")

    services = (args.world,) if args.world else SERVICES
    affected: list[str] = []
    for svc in services:
        target_bp, target_rp = _collect_targets(mounts.get(svc, []))
        try:
            world_dir = resolve_world_dir(args.ssh_host, args.ssh_user, args.ssh_pass, svc)
        except Exception as e:
            print(f"\n{svc}: SSH error resolving world dir: {e}")
            continue
        if not world_dir:
            print(f"\n{svc}: could not resolve active world directory (skipping)")
            continue
        print(f"\n{svc} (world: {world_dir.split('/')[-1]})")
        changed_here = False
        for kind, target, fname in [
            ("BP", target_bp, "world_behavior_packs.json"),
            ("RP", target_rp, "world_resource_packs.json"),
        ]:
            try:
                raw = sudo_cat(args.ssh_host, args.ssh_user, args.ssh_pass,
                               f"{world_dir}/{fname}")
                existing = json.loads(raw) if raw.strip() else []
            except Exception:
                # Missing or unreadable pin file — treat as empty, (re)create
                # on --apply.
                existing = []
            d = diff_pins(target, existing)
            n_add = len(d["added"])
            n_re = len(d["realigned"])
            n_keep = len(d["kept"])
            if n_add == 0 and n_re == 0:
                print(f" {kind}: in sync ({len(target)} managed pins, {n_keep} unmanaged kept)")
                continue
            changed_here = True
            for p in d["added"]:
                print(f" {kind} add: {fmt_pin(p)}")
            for old, new in d["realigned"]:
                print(f" {kind} align: {new['pack_id']} v{old['version']} → v{new['version']}")
            print(f" {kind} unmanaged kept: {n_keep}")
            if args.apply:
                merged = merge_pins(target, existing)
                content = json.dumps(merged, indent=4) + "\n"
                sudo_write(args.ssh_host, args.ssh_user, args.ssh_pass,
                           f"{world_dir}/{fname}", content)
                print(f" {kind}: wrote {len(merged)} pins")
        if changed_here:
            affected.append(svc)

    if not args.apply:
        if affected:
            print("\nRun with --apply to write the changes above to the server.")
        return 0

    if args.restart and affected:
        print(f"\nRestarting containers: {', '.join(f'mc-{s}' for s in affected)}")
        cmd = (
            f"cd /home/sysadmin/minecraft-multiworld && "
            f"docker compose restart {' '.join(affected)}"
        )
        try:
            print(ssh_run(args.ssh_host, args.ssh_user, args.ssh_pass, cmd))
        except Exception as e:
            print(f" restart failed: {e}")
            return 2
    elif args.restart:
        print("\nNothing to restart — no pins changed.")
    return 0


if __name__ == "__main__":
    sys.exit(main())