diff --git a/scripts/coverage.sh b/scripts/coverage.sh index ca7252e..a8e5bf4 100755 --- a/scripts/coverage.sh +++ b/scripts/coverage.sh @@ -27,7 +27,7 @@ cleanup() { } trap cleanup EXIT -IGNORE_REGEX='src/bin/repository_view_stats\.rs|src/bin/trace_arin_missing_vrps\.rs|src/bin/db_stats\.rs|src/bin/rrdp_state_dump\.rs|src/bin/ccr_dump\.rs|src/bin/ccr_verify\.rs|src/bin/ccr_to_routinator_csv\.rs|src/bin/ccr_to_compare_views\.rs|src/bin/cir_materialize\.rs|src/bin/cir_extract_inputs\.rs|src/bin/cir_drop_report\.rs|src/bin/cir_ta_only_fixture\.rs|src/ccr/compare_view\.rs|src/progress_log\.rs|src/cli\.rs|src/validation/run_tree_from_tal\.rs|src/validation/tree_parallel\.rs|src/validation/from_tal\.rs|src/sync/store_projection\.rs|src/cir/materialize\.rs' +IGNORE_REGEX='src/bin/repository_view_stats\.rs|src/bin/trace_arin_missing_vrps\.rs|src/bin/db_stats\.rs|src/bin/rrdp_state_dump\.rs|src/bin/ccr_dump\.rs|src/bin/ccr_verify\.rs|src/bin/ccr_to_routinator_csv\.rs|src/bin/ccr_to_compare_views\.rs|src/bin/cir_materialize\.rs|src/bin/cir_extract_inputs\.rs|src/bin/cir_drop_report\.rs|src/bin/cir_ta_only_fixture\.rs|src/bin/cir_dump_reject_list\.rs|src/bin/rpki_object_parse\.rs|src/bin/triage_ccr_cir_pair\.rs|src/ccr/compare_view\.rs|src/progress_log\.rs|src/cli\.rs|src/validation/run_tree_from_tal\.rs|src/validation/tree_parallel\.rs|src/validation/from_tal\.rs|src/sync/store_projection\.rs|src/cir/materialize\.rs' # Preserve colored output even though we post-process output by running under a pseudo-TTY. # We run tests only once, then generate both CLI text + HTML reports without rerunning tests. diff --git a/scripts/experiments/feature035/experiments.json b/scripts/experiments/feature035/experiments.json new file mode 100644 index 0000000..b0888e3 --- /dev/null +++ b/scripts/experiments/feature035/experiments.json @@ -0,0 +1,41 @@ +{ + "schemaVersion": 1, + "defaultRirs": ["afrinic", "apnic", "arin", "lacnic", "ripe"], + "experiments": [ + { + "id": "sync-ours-rsync-only", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "ours", "mode": "standard", "protocol": "rsync-only" } + }, + { + "id": "sync-rpki-client-rsync-only", + "left": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rsync-only" } + }, + { + "id": "strict-name", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "ours", "mode": "strict-name", "protocol": "rrdp+rsync" } + }, + { + "id": "strict-cms-der", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "ours", "mode": "strict-cms-der", "protocol": "rrdp+rsync" } + }, + { + "id": "strict-signed-attrs", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "ours", "mode": "strict-signed-attrs", "protocol": "rrdp+rsync" } + }, + { + "id": "strict-all", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "ours", "mode": "strict-all", "protocol": "rrdp+rsync" } + }, + { + "id": "rp-implementation-standard", + "left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" }, + "right": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rrdp+rsync" } + } + ] +} diff --git a/scripts/experiments/feature035/feature035_bundle.py b/scripts/experiments/feature035/feature035_bundle.py new file mode 100755 index 0000000..4145617 --- 
/dev/null +++ b/scripts/experiments/feature035/feature035_bundle.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python3 +import argparse +import hashlib +import json +import os +import platform +import subprocess +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + + +RIR_FIXTURES = { + "afrinic": { + "tal": "tal/afrinic.tal", + "ta": "ta/afrinic-ta.cer", + }, + "apnic": { + "tal": "tal/apnic-rfc7730-https.tal", + "ta": "ta/apnic-ta.cer", + }, + "arin": { + "tal": "tal/arin.tal", + "ta": "ta/arin-ta.cer", + }, + "lacnic": { + "tal": "tal/lacnic.tal", + "ta": "ta/lacnic-ta.cer", + }, + "ripe": { + "tal": "tal/ripe-ncc.tal", + "ta": "ta/ripe-ncc-ta.cer", + }, +} + + +def utc_now() -> str: + return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z") + + +def sha256_file(path: Path) -> str: + hasher = hashlib.sha256() + with path.open("rb") as file: + for chunk in iter(lambda: file.read(1024 * 1024), b""): + hasher.update(chunk) + return hasher.hexdigest() + + +def read_tal_uris(path: Path) -> list[str]: + uris: list[str] = [] + with path.open("r", encoding="utf-8") as file: + for line in file: + item = line.strip() + if not item or item.startswith("#"): + if uris: + break + continue + if item.startswith(("rsync://", "https://", "http://")): + uris.append(item) + continue + if uris: + break + return uris + + +def first_uri(uris: list[str], prefixes: tuple[str, ...]) -> str | None: + for uri in uris: + if uri.startswith(prefixes): + return uri + return None + + +def parse_rirs(raw: str) -> list[str]: + rirs = [item.strip().lower() for item in raw.split(",") if item.strip()] + if not rirs: + raise SystemExit("RIR list must not be empty") + invalid = [item for item in rirs if item not in RIR_FIXTURES] + if invalid: + raise SystemExit( + f"invalid RIR(s): {','.join(invalid)}; allowed: {','.join(RIR_FIXTURES)}" + ) + return rirs + + +def rel_or_abs(path: Path, root: Path | None) -> str: + path = path.resolve() + if root is not None: + try: + return path.relative_to(root.resolve()).as_posix() + except ValueError: + pass + return path.as_posix() + + +def git_commit(repo_root: Path) -> str | None: + try: + return subprocess.check_output( + ["git", "-C", str(repo_root), "rev-parse", "--short", "HEAD"], + text=True, + stderr=subprocess.DEVNULL, + ).strip() + except (subprocess.CalledProcessError, FileNotFoundError): + return None + + +def write_json(path: Path, value: dict[str, Any]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(value, indent=2, ensure_ascii=False) + "\n", encoding="utf-8") + + +def build_fixture_proof( + fixture_dir: Path, + rirs: list[str], + repo_root: Path | None, + ta_online_fetch_observed: bool, +) -> dict[str, Any]: + trust_anchors = [] + for rir in rirs: + mapping = RIR_FIXTURES[rir] + tal_path = fixture_dir / mapping["tal"] + ta_path = fixture_dir / mapping["ta"] + if not tal_path.is_file(): + raise SystemExit(f"missing TAL fixture for {rir}: {tal_path}") + if not ta_path.is_file(): + raise SystemExit(f"missing TA fixture for {rir}: {ta_path}") + tal_uris = read_tal_uris(tal_path) + trust_anchors.append( + { + "rir": rir, + "talPath": rel_or_abs(tal_path, repo_root), + "taPath": rel_or_abs(ta_path, repo_root), + "talUri": first_uri(tal_uris, ("https://", "http://")), + "taRsyncUri": first_uri(tal_uris, ("rsync://",)), + "talSha256": sha256_file(tal_path), + "taCertificateSha256": sha256_file(ta_path), + "talBytes": tal_path.stat().st_size, + "taCertificateBytes": ta_path.stat().st_size, + 
"taFixturePinned": not ta_online_fetch_observed, + "taOnlineFetchObserved": ta_online_fetch_observed, + } + ) + return { + "schemaVersion": 1, + "generatedBy": "feature035-experiment-driver", + "generatedAtUtc": utc_now(), + "fixtureDir": rel_or_abs(fixture_dir, repo_root), + "all5": set(rirs) == set(RIR_FIXTURES), + "rirs": rirs, + "trustAnchors": trust_anchors, + } + + +def parse_csv(raw: str) -> list[str]: + if not raw: + return [] + return [item.strip() for item in raw.split(",") if item.strip()] + + +def optional_path(raw: str | None, repo_root: Path | None) -> str | None: + if raw is None: + return None + return rel_or_abs(Path(raw), repo_root) + + +def build_run_meta(args: argparse.Namespace) -> dict[str, Any]: + rirs = parse_rirs(args.rirs) + repo_root = Path(args.repo_root).resolve() if args.repo_root else None + argv = json.loads(args.argv_json) if args.argv_json else [] + env_whitelist = json.loads(args.env_json) if args.env_json else {} + fixture_proof_summary = ( + json.loads(args.fixture_proof_summary_json) + if args.fixture_proof_summary_json + else None + ) + fixture_proof = None + if args.fixture_proof: + fixture_proof_path = Path(args.fixture_proof) + if fixture_proof_path.is_file(): + fixture_proof = json.loads(fixture_proof_path.read_text(encoding="utf-8")) + if not isinstance(argv, list): + raise SystemExit("--argv-json must decode to a JSON array") + if not isinstance(env_whitelist, dict): + raise SystemExit("--env-json must decode to a JSON object") + if fixture_proof_summary is not None and not isinstance(fixture_proof_summary, dict): + raise SystemExit("--fixture-proof-summary-json must decode to a JSON object") + + return { + "schemaVersion": 1, + "generatedBy": "feature035-experiment-driver", + "generatedAtUtc": utc_now(), + "experimentId": args.experiment_id, + "side": args.side, + "sideLabel": args.side_label, + "step": args.step, + "runId": args.run_id, + "liveRun": not args.replay_used, + "replayUsed": args.replay_used, + "rp": { + "kind": args.rp_kind, + "binary": args.rp_binary, + "version": args.rp_version, + "gitCommit": args.rp_git_commit, + "mode": args.rp_mode, + "protocolMode": args.protocol_mode, + "strictPolicies": parse_csv(args.strict_policies), + }, + "scope": { + "rirs": rirs, + "all5": set(rirs) == set(RIR_FIXTURES), + }, + "command": { + "argv": argv, + "cwd": args.cwd, + "envWhitelist": env_whitelist, + }, + "state": { + "resetBeforeRun": args.reset_before_run, + "stateRoot": args.state_root, + "db": args.db, + "repoBytesDb": args.repo_bytes_db, + "rawStoreDb": args.raw_store_db, + "rsyncMirrorRoot": args.rsync_mirror_root, + "cacheRoot": args.cache_root, + }, + "artifacts": { + "ccr": optional_path(args.ccr, repo_root), + "cir": optional_path(args.cir, repo_root), + "runMeta": optional_path(str(args.out), repo_root), + "fixtureProof": optional_path(args.fixture_proof, repo_root), + "reportJson": optional_path(args.report_json, repo_root), + "stageTimingJson": optional_path(args.stage_timing_json, repo_root), + "stdoutLog": optional_path(args.stdout_log, repo_root), + "stderrLog": optional_path(args.stderr_log, repo_root), + "processTime": optional_path(args.process_time, repo_root), + "vrpsCsv": optional_path(args.vrps_csv, repo_root), + "vapsCsv": optional_path(args.vaps_csv, repo_root), + }, + "fixtureProof": fixture_proof, + "fixtureProofSummary": fixture_proof_summary, + "metrics": { + "exitCode": args.exit_code, + "wallMs": args.wall_ms, + "maxRssKb": args.max_rss_kb, + "vrps": args.vrps, + "vaps": args.vaps, + "publicationPoints": 
args.publication_points, + "warnings": args.warnings, + "cirObjectCount": args.cir_object_count, + "cirRejectCount": args.cir_reject_count, + "cirTrustAnchorCount": args.cir_trust_anchor_count, + "ccrStateDigest": args.ccr_state_digest, + }, + "environment": { + "host": args.host or platform.node(), + "platform": args.platform or platform.platform(), + }, + } + + +def command_fixture_proof(args: argparse.Namespace) -> None: + repo_root = Path(args.repo_root).resolve() if args.repo_root else None + proof = build_fixture_proof( + fixture_dir=Path(args.fixture_dir), + rirs=parse_rirs(args.rirs), + repo_root=repo_root, + ta_online_fetch_observed=args.ta_online_fetch_observed, + ) + write_json(Path(args.out), proof) + + +def command_run_meta(args: argparse.Namespace) -> None: + write_json(Path(args.out), build_run_meta(args)) + + +def command_dry_run_bundle(args: argparse.Namespace) -> None: + out_dir = Path(args.out_dir) + repo_root = Path(args.repo_root).resolve() if args.repo_root else Path.cwd().resolve() + fixture_proof = out_dir / "fixture-proof.json" + command_fixture_proof( + argparse.Namespace( + fixture_dir=args.fixture_dir, + rirs=args.rirs, + repo_root=str(repo_root), + out=str(fixture_proof), + ta_online_fetch_observed=False, + ) + ) + for side, side_label in (("left", "A"), ("right", "B")): + run_dir = out_dir / side_label / "snapshot" + meta_args = argparse.Namespace( + out=run_dir / "run-meta.json", + repo_root=str(repo_root), + experiment_id=args.experiment_id, + side=side, + side_label=side_label, + step="snapshot", + run_id=f"{side_label}-snapshot-dry-run", + replay_used=False, + rp_kind="ours" if side_label == "A" else "rpki-client", + rp_binary=f"bin/{'rpki' if side_label == 'A' else 'rpki-client'}", + rp_version="dry-run", + rp_git_commit=git_commit(repo_root), + rp_mode="standard", + protocol_mode="rrdp+rsync", + strict_policies="", + rirs=args.rirs, + argv_json=json.dumps(["dry-run"]), + env_json=json.dumps({"RPKI_PROGRESS_LOG": "1"}), + cwd=str(out_dir), + reset_before_run=True, + state_root=str(out_dir / side_label / "state"), + db=str(out_dir / side_label / "state" / "work-db"), + repo_bytes_db=str(out_dir / side_label / "state" / "repo-bytes.db"), + raw_store_db=str(out_dir / side_label / "state" / "raw-store.db"), + rsync_mirror_root=str(out_dir / side_label / "state" / "rsync-mirror"), + cache_root=str(out_dir / side_label / "state" / "cache"), + ccr=str(run_dir / "result.ccr"), + cir=str(run_dir / "result.cir"), + fixture_proof=str(fixture_proof), + report_json=str(run_dir / "report.json"), + stage_timing_json=str(run_dir / "stage-timing.json"), + stdout_log=str(run_dir / "stdout.log"), + stderr_log=str(run_dir / "stderr.log"), + process_time=str(run_dir / "process-time.txt"), + vrps_csv=str(run_dir / "vrps.csv"), + vaps_csv=str(run_dir / "vaps.csv"), + exit_code=0, + wall_ms=0, + max_rss_kb=0, + vrps=0, + vaps=0, + publication_points=0, + warnings=0, + cir_object_count=0, + cir_reject_count=0, + cir_trust_anchor_count=len(parse_rirs(args.rirs)), + ccr_state_digest=None, + fixture_proof_summary_json=json.dumps( + { + "taFixturePinned": True, + "taOnlineFetchObserved": False, + "trustAnchorCount": len(parse_rirs(args.rirs)), + } + ), + ) + command_run_meta(meta_args) + + +def add_run_meta_args(parser: argparse.ArgumentParser) -> None: + parser.add_argument("--out", required=True) + parser.add_argument("--repo-root") + parser.add_argument("--experiment-id", required=True) + parser.add_argument("--side", choices=["left", "right"], required=True) + 
parser.add_argument("--side-label", choices=["A", "B"], required=True) + parser.add_argument("--step", choices=["snapshot", "delta"], required=True) + parser.add_argument("--run-id", required=True) + parser.add_argument("--replay-used", action="store_true") + parser.add_argument("--rp-kind", required=True) + parser.add_argument("--rp-binary", required=True) + parser.add_argument("--rp-version") + parser.add_argument("--rp-git-commit") + parser.add_argument("--rp-mode", default="standard") + parser.add_argument("--protocol-mode", default="rrdp+rsync") + parser.add_argument("--strict-policies", default="") + parser.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe") + parser.add_argument("--argv-json") + parser.add_argument("--env-json") + parser.add_argument("--cwd", default=os.getcwd()) + parser.add_argument("--reset-before-run", action="store_true") + parser.add_argument("--state-root") + parser.add_argument("--db") + parser.add_argument("--repo-bytes-db") + parser.add_argument("--raw-store-db") + parser.add_argument("--rsync-mirror-root") + parser.add_argument("--cache-root") + parser.add_argument("--ccr") + parser.add_argument("--cir") + parser.add_argument("--fixture-proof") + parser.add_argument("--fixture-proof-summary-json") + parser.add_argument("--report-json") + parser.add_argument("--stage-timing-json") + parser.add_argument("--stdout-log") + parser.add_argument("--stderr-log") + parser.add_argument("--process-time") + parser.add_argument("--vrps-csv") + parser.add_argument("--vaps-csv") + parser.add_argument("--exit-code", type=int) + parser.add_argument("--wall-ms", type=int) + parser.add_argument("--max-rss-kb", type=int) + parser.add_argument("--vrps", type=int) + parser.add_argument("--vaps", type=int) + parser.add_argument("--publication-points", type=int) + parser.add_argument("--warnings", type=int) + parser.add_argument("--cir-object-count", type=int) + parser.add_argument("--cir-reject-count", type=int) + parser.add_argument("--cir-trust-anchor-count", type=int) + parser.add_argument("--ccr-state-digest") + parser.add_argument("--host") + parser.add_argument("--platform") + + +def main() -> None: + parser = argparse.ArgumentParser(description="Feature #035 CCR/CIR experiment bundle helpers") + subparsers = parser.add_subparsers(dest="command", required=True) + + fixture = subparsers.add_parser("fixture-proof") + fixture.add_argument("--fixture-dir", default="tests/fixtures") + fixture.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe") + fixture.add_argument("--repo-root") + fixture.add_argument("--out", required=True) + fixture.add_argument("--ta-online-fetch-observed", action="store_true") + fixture.set_defaults(func=command_fixture_proof) + + run_meta = subparsers.add_parser("run-meta") + add_run_meta_args(run_meta) + run_meta.set_defaults(func=command_run_meta) + + dry_run = subparsers.add_parser("dry-run-bundle") + dry_run.add_argument("--out-dir", required=True) + dry_run.add_argument("--repo-root", default=".") + dry_run.add_argument("--fixture-dir", default="tests/fixtures") + dry_run.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe") + dry_run.add_argument("--experiment-id", default="m2-dry-run") + dry_run.set_defaults(func=command_dry_run_bundle) + + args = parser.parse_args() + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/scripts/experiments/feature035/fixture-manifest.json b/scripts/experiments/feature035/fixture-manifest.json new file mode 100644 index 0000000..0d068cb --- /dev/null +++ 
b/scripts/experiments/feature035/fixture-manifest.json @@ -0,0 +1,10 @@ +{ + "schemaVersion": 1, + "rirs": { + "afrinic": { "tal": "tal/afrinic.tal", "ta": "ta/afrinic-ta.cer" }, + "apnic": { "tal": "tal/apnic-rfc7730-https.tal", "ta": "ta/apnic-ta.cer" }, + "arin": { "tal": "tal/arin.tal", "ta": "ta/arin-ta.cer" }, + "lacnic": { "tal": "tal/lacnic.tal", "ta": "ta/lacnic-ta.cer" }, + "ripe": { "tal": "tal/ripe-ncc.tal", "ta": "ta/ripe-ncc-ta.cer" } + } +} diff --git a/scripts/experiments/feature035/run_feature035_experiment.py b/scripts/experiments/feature035/run_feature035_experiment.py new file mode 100755 index 0000000..f6196d1 --- /dev/null +++ b/scripts/experiments/feature035/run_feature035_experiment.py @@ -0,0 +1,1152 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import os +import shlex +import shutil +import subprocess +import sys +import time +from dataclasses import dataclass +from pathlib import Path +from typing import Any + + +SCRIPT_DIR = Path(__file__).resolve().parent +REPO_ROOT = SCRIPT_DIR.parents[2] +DEV_ROOT = REPO_ROOT.parents[1] +PORTABLE_ROOT = DEV_ROOT / "rpki-client-portable" +FEATURE_BUNDLE = SCRIPT_DIR / "feature035_bundle.py" +EXPERIMENTS_PATH = SCRIPT_DIR / "experiments.json" +FIXTURE_MANIFEST_PATH = SCRIPT_DIR / "fixture-manifest.json" + + +def load_json(path: Path) -> Any: + with path.open("r", encoding="utf-8") as handle: + return json.load(handle) + + +def write_json(path: Path, value: Any) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w", encoding="utf-8") as handle: + json.dump(value, handle, indent=2, ensure_ascii=False) + handle.write("\n") + + +def ensure_dir(path: Path) -> None: + path.mkdir(parents=True, exist_ok=True) + + +def git_commit(repo_root: Path) -> str: + result = run_local( + ["git", "-C", str(repo_root), "rev-parse", "--short", "HEAD"], + capture=True, + check=False, + ) + return result.stdout.strip() if result.returncode == 0 else "" + + +def run_local( + argv: list[str], + *, + cwd: Path | None = None, + check: bool = True, + capture: bool = False, + env: dict[str, str] | None = None, +) -> subprocess.CompletedProcess[str]: + result = subprocess.run( + argv, + cwd=str(cwd) if cwd else None, + text=True, + check=False, + capture_output=capture, + env=env, + ) + if check and result.returncode != 0: + raise SystemExit( + f"command failed ({result.returncode}): {' '.join(shlex.quote(item) for item in argv)}\n" + f"stdout:\n{result.stdout if result.stdout else ''}\n" + f"stderr:\n{result.stderr if result.stderr else ''}" + ) + return result + + +def ssh_script(target: str, script: str, *, check: bool = True) -> subprocess.CompletedProcess[str]: + result = subprocess.run( + ["ssh", target, "bash", "-s"], + input=script, + text=True, + check=False, + ) + if check and result.returncode != 0: + raise SystemExit(f"remote script failed ({result.returncode}) on {target}") + return result + + +def rsync_to_remote(target: str, source: Path, destination: str) -> None: + run_local(["rsync", "-a", str(source), f"{target}:{destination}"]) + + +def rsync_dir_to_remote(target: str, source: Path, destination: str) -> None: + run_local(["rsync", "-a", f"{source}/", f"{target}:{destination}/"]) + + +def rsync_from_remote(target: str, source: str, destination: Path) -> None: + ensure_dir(destination) + run_local(["rsync", "-a", f"{target}:{source}/", f"{destination}/"]) + + +def rel_cmd(parts: list[str]) -> str: + return shlex.join(str(part) for part in parts) + + +def utc_stamp() 
-> str: + return time.strftime("%Y%m%dT%H%M%SZ", time.gmtime()) + + +def detect_libtls_path(rpki_client_bin: Path) -> Path: + ldd = run_local(["ldd", str(rpki_client_bin)], capture=True) + for line in ldd.stdout.splitlines(): + if "libtls.so.28" not in line: + continue + if "=>" not in line: + continue + candidate = line.split("=>", 1)[1].strip().split(" ", 1)[0] + path = Path(candidate) + if path.is_file(): + return path + fallback = DEV_ROOT / ".cache" / "rpki-client-9.8-cir" / "libtls.so.28" + if fallback.is_file(): + return fallback + raise SystemExit("unable to locate libtls.so.28 for rpki-client") + + +def parse_elapsed_to_ms(raw: str) -> int: + raw = raw.strip() + if not raw: + return 0 + if "-" in raw: + days, raw = raw.split("-", 1) + else: + days = "0" + parts = raw.split(":") + try: + if len(parts) == 4: + days = str(int(days) + int(parts[0])) + hours, minutes, seconds = parts[1:] + elif len(parts) == 3: + hours, minutes, seconds = parts + elif len(parts) == 2: + hours = "0" + minutes, seconds = parts + else: + hours = "0" + minutes = "0" + seconds = parts[0] + total_seconds = ( + int(days) * 86400 + int(hours) * 3600 + int(minutes) * 60 + float(seconds) + ) + except ValueError: + time_part = raw.rsplit(":", 1)[-1] + total_seconds = float(time_part) + return int(round(total_seconds * 1000)) + + +def parse_time_file(path: Path) -> dict[str, Any]: + data: dict[str, Any] = {} + if not path.is_file(): + return data + for line in path.read_text(encoding="utf-8", errors="replace").splitlines(): + if "Elapsed (wall clock) time" in line and ":" in line: + if "):" in line: + elapsed = line.rsplit("):", 1)[1] + else: + elapsed = line.rsplit(":", 1)[1] + data["wallMs"] = parse_elapsed_to_ms(elapsed) + elif "Maximum resident set size" in line and ":" in line: + try: + data["maxRssKb"] = int(line.rsplit(":", 1)[1].strip()) + except ValueError: + pass + elif "User time (seconds)" in line and ":" in line: + try: + data["userSeconds"] = float(line.rsplit(":", 1)[1].strip()) + except ValueError: + pass + elif "System time (seconds)" in line and ":" in line: + try: + data["systemSeconds"] = float(line.rsplit(":", 1)[1].strip()) + except ValueError: + pass + return data + + +def path_within(base: Path, path: Path) -> str: + return path.relative_to(base).as_posix() + + +def load_report_counts(report_path: Path) -> dict[str, Any]: + report = load_json(report_path) + publication_points = report.get("publication_points", []) + tree = report.get("tree", {}) + warnings = tree.get("warnings", []) + return { + "vrps": len(report.get("vrps", [])), + "aspas": len(report.get("aspas", [])), + "publicationPoints": len(publication_points), + "warnings": len(warnings) + + sum(len(pp.get("warnings", [])) for pp in publication_points if isinstance(pp, dict)), + "treeInstancesProcessed": tree.get("instances_processed"), + "treeInstancesFailed": tree.get("instances_failed"), + "reportJson": report, + } + + +def load_rpki_client_counts(report_path: Path) -> dict[str, Any]: + report = load_json(report_path) + metadata = report.get("metadata", {}) + roas = report.get("roas", []) + aspas = report.get("aspas", []) + return { + "vrps": int(metadata.get("vrps", len(roas))), + "uniqueVrps": int(metadata.get("uniquevrps", len(roas))), + "aspas": int(metadata.get("aspas", len(aspas))), + "uniqueVaps": int(metadata.get("uniquevaps", len(aspas))), + "repositories": int(metadata.get("repositories", 0)), + "warnings": 0, + "reportJson": report, + } + + +def load_cir_counts(cir_path: Path) -> dict[str, int]: + if not 
cir_path.is_file(): + return { + "cirObjectCount": 0, + "cirRejectCount": 0, + "cirTrustAnchorCount": 0, + } + result = run_local( + [ + str(REPO_ROOT / "target" / "release" / "cir_dump_reject_list"), + "--cir", + str(cir_path), + "--limit", + "0", + ], + capture=True, + check=False, + ) + if result.returncode != 0: + raise SystemExit(f"decode CIR failed: {cir_path}\n{result.stderr}") + values: dict[str, int] = {} + for line in result.stdout.splitlines(): + if "=" not in line: + continue + key, value = line.split("=", 1) + if key in {"object_count", "trust_anchor_count", "reject_count"}: + values[key] = int(value) + return { + "cirObjectCount": values.get("object_count", 0), + "cirRejectCount": values.get("reject_count", 0), + "cirTrustAnchorCount": values.get("trust_anchor_count", 0), + } + + +def copy_rpki_client_outputs(run_dir: Path) -> None: + if (run_dir / "json").is_file(): + shutil.copy2(run_dir / "json", run_dir / "report.json") + if (run_dir / "rpki.ccr").is_file(): + shutil.copy2(run_dir / "rpki.ccr", run_dir / "result.ccr") + if (run_dir / "rpki.cir").is_file(): + shutil.copy2(run_dir / "rpki.cir", run_dir / "result.cir") + + +def write_rpki_client_stage_timing(run_dir: Path) -> None: + report_path = run_dir / "report.json" + if not report_path.is_file(): + return + report = load_json(report_path) + metadata = report.get("metadata", {}) + stage_timing = { + "tool": "rpki-client-portable", + "metadata": { + "elapsedTimeSeconds": metadata.get("elapsedtime"), + "userTimeSeconds": metadata.get("usertime"), + "systemTimeSeconds": metadata.get("systemtime"), + }, + "counts": { + "repositories": metadata.get("repositories"), + "vrps": metadata.get("vrps"), + "uniqueVrps": metadata.get("uniquevrps"), + "vaps": metadata.get("vaps"), + "uniqueVaps": metadata.get("uniquevaps"), + }, + } + write_json(run_dir / "stage-timing.json", stage_timing) + + +def build_tool_binaries() -> None: + run_local( + [ + "cargo", + "build", + "--release", + "--bin", + "rpki", + "--bin", + "triage_ccr_cir_pair", + "--bin", + "cir_dump_reject_list", + ], + cwd=REPO_ROOT, + ) + run_local(["make", "-j2"], cwd=PORTABLE_ROOT) + + +def build_fixture_proof(run_root: Path, rirs: list[str]) -> Path: + fixture_dir = REPO_ROOT / "tests" / "fixtures" + fixture_proof = run_root / "fixture-proof.json" + run_local( + [ + sys.executable, + str(FEATURE_BUNDLE), + "fixture-proof", + "--fixture-dir", + str(fixture_dir), + "--repo-root", + str(run_root), + "--rirs", + ",".join(rirs), + "--out", + str(fixture_proof), + ] + ) + return fixture_proof + + +def experiment_steps(exp: dict[str, Any]) -> list[dict[str, str]]: + return [ + { + "step": "snapshot", + "left": exp["left"], + "right": exp["right"], + }, + { + "step": "delta", + "left": exp["left"], + "right": exp["right"], + }, + ] + + +def render_experiment_plan(exp: dict[str, Any], run_root: Path, remote_root: Path, ssh_target: str, rirs: list[str]) -> dict[str, Any]: + plan_steps: list[dict[str, Any]] = [] + for step in ("snapshot", "delta"): + plan_steps.append( + { + "step": step, + "leftRunDir": str(run_root / "experiments" / exp["id"] / "A" / step), + "rightRunDir": str(run_root / "experiments" / exp["id"] / "B" / step), + "remoteLeftRunDir": str(remote_root / "experiments" / exp["id"] / "A" / step), + "remoteRightRunDir": str(remote_root / "experiments" / exp["id"] / "B" / step), + "leftCommand": build_side_command(remote_root, exp["id"], exp["left"], "A", step, rirs), + "rightCommand": build_side_command(remote_root, exp["id"], exp["right"], "B", step, rirs), + } + ) 
+ return { + "id": exp["id"], + "left": exp["left"], + "right": exp["right"], + "sshTarget": ssh_target, + "remoteRoot": str(remote_root), + "runRoot": str(run_root), + "steps": plan_steps, + } + + +def build_side_command( + remote_root: Path, + exp_id: str, + side: dict[str, Any], + side_label: str, + step: str, + rirs: list[str], +) -> str: + run_dir = remote_root / "experiments" / exp_id / side_label / step + state_dir = remote_root / "experiments" / exp_id / side_label / "state" / ("ours" if side["rpKind"] == "ours" else "rpki-client") + fixture_root = remote_root / "fixtures" + if side["rpKind"] == "ours": + argv = [ + str(remote_root / "bin" / "rpki"), + "--db", + str(state_dir / "work-db"), + "--raw-store-db", + str(state_dir / "raw-store.db"), + "--repo-bytes-db", + str(state_dir / "repo-bytes.db"), + ] + for rir in rirs: + argv.extend( + [ + "--tal-path", + str(fixture_root / "tal" / fixture_name(rir, "tal")), + "--ta-path", + str(fixture_root / "ta" / fixture_name(rir, "ta")), + ] + ) + if side["protocol"] == "rsync-only": + argv.append("--disable-rrdp") + if side["mode"] == "strict-name": + argv.extend(["--strict", "name"]) + elif side["mode"] == "strict-cms-der": + argv.extend(["--strict", "cms-der"]) + elif side["mode"] == "strict-signed-attrs": + argv.extend(["--strict", "signed-attrs"]) + elif side["mode"] == "strict-all": + argv.extend(["--strict", "all"]) + argv.extend( + [ + "--report-json", + str(run_dir / "report.json"), + "--report-json-compact", + "--ccr-out", + str(run_dir / "result.ccr"), + "--cir-enable", + "--cir-out", + str(run_dir / "result.cir"), + ] + ) + for rir in rirs: + argv.extend(["--cir-tal-uri", cir_tal_uri_for_rir(rir)]) + argv.extend( + [ + "--vrps-csv-out", + str(run_dir / "vrps.csv"), + "--vaps-csv-out", + str(run_dir / "vaps.csv"), + "--compare-view-trust-anchor", + compare_view_trust_anchor(rirs), + ] + ) + return "cd {run_dir} && {prefix} /usr/bin/time -v -o process-time.txt -- {cmd} > stdout.log 2> stderr.log".format( + run_dir=shlex.quote(str(run_dir)), + prefix=_setup_prefix(step, state_dir, "ours"), + cmd=rel_cmd(argv), + ) + + argv = [ + str(remote_root / "bin" / "rpki-client"), + "-vv", + "-S", + str(state_dir / "rpki-client-skiplist"), + ] + if side["protocol"] == "rsync-only": + argv.append("-R") + for rir in rirs: + argv.extend( + [ + "-t", + str(fixture_root / "tal" / fixture_name(rir, "tal")), + "-T", + f"{fixture_desc(rir)}:{state_dir / 'cache' / 'fixtures' / fixture_name(rir, 'ta')}", + ] + ) + argv.extend(["-d", str(state_dir / "cache"), str(run_dir)]) + return "cd {run_dir} && {prefix} LD_LIBRARY_PATH={lib} /usr/bin/time -v -o process-time.txt -- {cmd} > stdout.log 2> stderr.log".format( + run_dir=shlex.quote(str(run_dir)), + prefix=_setup_prefix(step, state_dir, "rpki-client"), + lib=shlex.quote(str(remote_root / "lib")), + cmd=rel_cmd(argv), + ) + + +def _setup_prefix(step: str, state_dir: Path, kind: str) -> str: + if step == "snapshot": + if kind == "ours": + return "rm -rf {state} && mkdir -p {state}/work-db {state}/rsync-mirror && chmod -R 0777 {state} && ".format( + state=shlex.quote(str(state_dir)) + ) + return "rm -rf {state} && mkdir -p {state}/cache {state}/cache/fixtures && touch {state}/rpki-client-skiplist && chmod -R 0777 {state} && ".format( + state=shlex.quote(str(state_dir)) + ) + if kind == "ours": + return "mkdir -p {state}/work-db {state}/rsync-mirror && chmod -R 0777 {state} && ".format( + state=shlex.quote(str(state_dir)) + ) + return "mkdir -p {state}/cache {state}/cache/fixtures && touch 
{state}/rpki-client-skiplist && chmod -R 0777 {state} && ".format( + state=shlex.quote(str(state_dir)) + ) + + +def fixture_name(rir: str, kind: str) -> str: + fixture_manifest = load_json(FIXTURE_MANIFEST_PATH) + return Path(fixture_manifest["rirs"][rir][kind]).name + + +def fixture_desc(rir: str) -> str: + return { + "afrinic": "afrinic", + "apnic": "apnic-rfc7730-https", + "arin": "arin", + "lacnic": "lacnic", + "ripe": "ripe-ncc", + }[rir] + + +def cir_tal_uri_for_rir(rir: str) -> str: + return { + "afrinic": "https://rpki.afrinic.net/tal/afrinic.tal", + "apnic": "https://rpki.apnic.net/tal/apnic-rfc7730-https.tal", + "arin": "https://www.arin.net/resources/manage/rpki/arin.tal", + "lacnic": "https://www.lacnic.net/innovaportal/file/4983/1/lacnic.tal", + "ripe": "https://tal.rpki.ripe.net/ripe-ncc.tal", + }[rir] + + +def compare_view_trust_anchor(rirs: list[str]) -> str: + return "all5" if len(rirs) > 1 else rirs[0] + + +def sanitize_run_meta( + run_root: Path, + exp_id: str, + side_label: str, + step: str, + rp_kind: str, + rp_mode: str, + protocol: str, + strict_policies: str, + rirs: list[str], + run_dir: Path, + fixture_proof: Path, + result_ccr: Path, + result_cir: Path, + report_json: Path, + stage_timing_json: Path, + process_time: Path, + stdout_log: Path, + stderr_log: Path, + exit_code: int, + counts: dict[str, Any], + time_info: dict[str, Any], + fixture_pinned: bool, +) -> Path: + repo_root = run_root + run_meta_path = run_dir / "run-meta.json" + run_meta_args = [ + sys.executable, + str(FEATURE_BUNDLE), + "run-meta", + "--out", + str(run_meta_path), + "--repo-root", + str(repo_root), + "--experiment-id", + exp_id, + "--side", + "left" if side_label == "A" else "right", + "--side-label", + side_label, + "--step", + step, + "--run-id", + f"{side_label}-{step}", + "--rp-kind", + rp_kind, + "--rp-binary", + "bin/rpki" if rp_kind == "ours" else "bin/rpki-client", + "--rp-version", + "portable" if rp_kind == "rpki-client" else "ours", + "--rp-mode", + rp_mode, + "--protocol-mode", + protocol, + "--strict-policies", + strict_policies, + "--rirs", + ",".join(rirs), + "--argv-json", + json.dumps([]), + "--env-json", + json.dumps({}), + "--cwd", + str(run_root), + "--reset-before-run", + "--state-root", + str(run_dir.parent / "state"), + "--db", + str(run_dir.parent / "state" / "work-db"), + "--repo-bytes-db", + str(run_dir.parent / "state" / "repo-bytes.db"), + "--raw-store-db", + str(run_dir.parent / "state" / "raw-store.db"), + "--rsync-mirror-root", + str(run_dir.parent / "state" / "rsync-mirror"), + "--cache-root", + str(run_dir.parent / "state" / ("cache" if rp_kind == "rpki-client" else "work-db")), + "--ccr", + str(result_ccr), + "--cir", + str(result_cir), + "--fixture-proof", + str(fixture_proof), + "--fixture-proof-summary-json", + json.dumps( + { + "taFixturePinned": fixture_pinned, + "taOnlineFetchObserved": False, + "trustAnchorCount": len(rirs), + } + ), + "--report-json", + str(report_json), + "--stage-timing-json", + str(stage_timing_json), + "--stdout-log", + str(stdout_log), + "--stderr-log", + str(stderr_log), + "--process-time", + str(process_time), + "--exit-code", + str(exit_code), + "--wall-ms", + str(time_info.get("wallMs", 0)), + "--max-rss-kb", + str(time_info.get("maxRssKb", 0)), + "--vrps", + str(counts.get("vrps", 0)), + "--vaps", + str(counts.get("aspas", 0)), + "--publication-points", + str(counts.get("publicationPoints", counts.get("repositories", 0))), + "--warnings", + str(counts.get("warnings", 0)), + "--cir-object-count", + 
str(counts.get("cirObjectCount", 0)), + "--cir-reject-count", + str(counts.get("cirRejectCount", 0)), + "--cir-trust-anchor-count", + str(counts.get("cirTrustAnchorCount", len(rirs))), + "--host", + os.uname().nodename, + "--platform", + sys.platform, + ] + run_local(run_meta_args) + return run_meta_path + + +def run_remote_step( + ssh_target: str, + remote_root: Path, + exp_id: str, + side_label: str, + step: str, + side: dict[str, Any], + rirs: list[str], +) -> None: + exp_root = remote_root / "experiments" / exp_id + run_dir = exp_root / side_label / step + state_dir = exp_root / side_label / "state" / ("ours" if side["rpKind"] == "ours" else "rpki-client") + ensure = [ + f"mkdir -p {shlex.quote(str(run_dir))}", + f"mkdir -p {shlex.quote(str(run_dir.parent))}", + f"chmod 0777 {shlex.quote(str(run_dir))}", + ] + if side["rpKind"] == "ours": + ensure.extend( + [ + f"mkdir -p {shlex.quote(str(state_dir / 'work-db'))}", + f"mkdir -p {shlex.quote(str(state_dir / 'rsync-mirror'))}", + f"chmod -R 0777 {shlex.quote(str(exp_root / side_label / 'state'))}", + ] + ) + if step == "snapshot": + ensure.insert(0, f"rm -rf {shlex.quote(str(exp_root / side_label / 'state' / 'ours'))}") + argv = [ + str(remote_root / "bin" / "rpki"), + "--db", + str(state_dir / "work-db"), + "--raw-store-db", + str(state_dir / "raw-store.db"), + "--repo-bytes-db", + str(state_dir / "repo-bytes.db"), + ] + for rir in rirs: + argv.extend( + [ + "--tal-path", + str(remote_root / "fixtures" / "tal" / fixture_name(rir, "tal")), + "--ta-path", + str(remote_root / "fixtures" / "ta" / fixture_name(rir, "ta")), + ] + ) + if side["protocol"] == "rsync-only": + argv.append("--disable-rrdp") + if side["mode"] == "strict-name": + argv.extend(["--strict", "name"]) + elif side["mode"] == "strict-cms-der": + argv.extend(["--strict", "cms-der"]) + elif side["mode"] == "strict-signed-attrs": + argv.extend(["--strict", "signed-attrs"]) + elif side["mode"] == "strict-all": + argv.extend(["--strict", "all"]) + argv.extend( + [ + "--report-json", + str(run_dir / "report.json"), + "--report-json-compact", + "--ccr-out", + str(run_dir / "result.ccr"), + "--cir-enable", + "--cir-out", + str(run_dir / "result.cir"), + ] + ) + for rir in rirs: + argv.extend(["--cir-tal-uri", cir_tal_uri_for_rir(rir)]) + argv.extend( + [ + "--vrps-csv-out", + str(run_dir / "vrps.csv"), + "--vaps-csv-out", + str(run_dir / "vaps.csv"), + "--compare-view-trust-anchor", + compare_view_trust_anchor(rirs), + ] + ) + else: + ensure.extend( + [ + f"mkdir -p {shlex.quote(str(state_dir / 'cache'))}", + f"mkdir -p {shlex.quote(str(state_dir / 'cache' / 'fixtures'))}", + f"touch {shlex.quote(str(state_dir / 'rpki-client-skiplist'))}", + f"chmod -R 0777 {shlex.quote(str(exp_root / side_label / 'state'))}", + ] + ) + if step == "snapshot": + ensure.insert(0, f"rm -rf {shlex.quote(str(exp_root / side_label / 'state' / 'rpki-client'))}") + for rir in rirs: + ensure.append( + f"cp -f {shlex.quote(str(remote_root / 'fixtures' / 'ta' / fixture_name(rir, 'ta')))} {shlex.quote(str(state_dir / 'cache' / 'fixtures' / fixture_name(rir, 'ta')))}" + ) + argv = [ + str(remote_root / "bin" / "rpki-client"), + "-vv", + "-S", + str(state_dir / "rpki-client-skiplist"), + ] + if side["protocol"] == "rsync-only": + argv.append("-R") + for rir in rirs: + argv.extend( + [ + "-t", + str(remote_root / "fixtures" / "tal" / fixture_name(rir, "tal")), + "-T", + f"{fixture_desc(rir)}:{state_dir / 'cache' / 'fixtures' / fixture_name(rir, 'ta')}", + ] + ) + argv.extend(["-d", str(state_dir / "cache"), 
str(run_dir)]) + + time_prefix = "/usr/bin/time" + if side["rpKind"] == "ours": + time_prefix = "env RPKI_PROGRESS_LOG=1 RPKI_PROGRESS_SLOW_SECS=10 /usr/bin/time" + elif side["rpKind"] == "rpki-client": + time_prefix = f"env LD_LIBRARY_PATH={shlex.quote(str(remote_root / 'lib'))} /usr/bin/time" + + command = ( + "set -euo pipefail; " + + "; ".join(ensure) + + "; " + + "set +e; " + + time_prefix + + " -v -o " + + shlex.quote(str(run_dir / "process-time.txt")) + + " -- " + + rel_cmd(argv) + + " > " + + shlex.quote(str(run_dir / "stdout.log")) + + " 2> " + + shlex.quote(str(run_dir / "stderr.log")) + + "; ec=$?; set -e; printf '%s\n' \"$ec\" > " + + shlex.quote(str(run_dir / "exit-code.txt")) + + "; true" + ) + ssh_script(ssh_target, command) + + if side["rpKind"] == "rpki-client": + copy_cmd = ( + f"[ -f {shlex.quote(str(run_dir / 'json'))} ] && cp -f {shlex.quote(str(run_dir / 'json'))} {shlex.quote(str(run_dir / 'report.json'))} || true; " + f"[ -f {shlex.quote(str(run_dir / 'rpki.ccr'))} ] && cp -f {shlex.quote(str(run_dir / 'rpki.ccr'))} {shlex.quote(str(run_dir / 'result.ccr'))} || true; " + f"[ -f {shlex.quote(str(run_dir / 'rpki.cir'))} ] && cp -f {shlex.quote(str(run_dir / 'rpki.cir'))} {shlex.quote(str(run_dir / 'result.cir'))} || true; " + f"[ -f {shlex.quote(str(run_dir / 'report.json'))} ] && python3 - <<'PY' {shlex.quote(str(run_dir / 'report.json'))} {shlex.quote(str(run_dir / 'stage-timing.json'))} || true\n" + "import json, sys\n" + "report = json.load(open(sys.argv[1]))\n" + "meta = report.get('metadata', {})\n" + "stage = {\n" + " 'tool': 'rpki-client-portable',\n" + " 'metadata': {\n" + " 'elapsedTimeSeconds': meta.get('elapsedtime'),\n" + " 'userTimeSeconds': meta.get('usertime'),\n" + " 'systemTimeSeconds': meta.get('systemtime'),\n" + " },\n" + " 'counts': {\n" + " 'repositories': meta.get('repositories'),\n" + " 'vrps': meta.get('vrps'),\n" + " 'uniqueVrps': meta.get('uniquevrps'),\n" + " 'vaps': meta.get('vaps'),\n" + " 'uniqueVaps': meta.get('uniquevaps'),\n" + " }\n" + "}\n" + "json.dump(stage, open(sys.argv[2], 'w'), indent=2)\n" + "print()\n" + "PY" + ) + ssh_script(ssh_target, copy_cmd) + + +def parse_exit_code(path: Path) -> int: + if not path.is_file(): + return 1 + return int(path.read_text(encoding="utf-8").strip() or "1") + + +def generate_run_meta( + local_exp_root: Path, + exp_id: str, + side_label: str, + step: str, + side: dict[str, Any], + rirs: list[str], + fixture_proof: Path, + run_dir: Path, +) -> Path: + result_ccr = run_dir / "result.ccr" + result_cir = run_dir / "result.cir" + report_json = run_dir / "report.json" + stage_timing_json = run_dir / "stage-timing.json" + process_time = run_dir / "process-time.txt" + stdout_log = run_dir / "stdout.log" + stderr_log = run_dir / "stderr.log" + exit_code = parse_exit_code(run_dir / "exit-code.txt") + time_info = parse_time_file(process_time) + if side["rpKind"] == "ours": + counts = load_report_counts(report_json) + counts.update(load_cir_counts(result_cir)) + else: + counts = load_rpki_client_counts(report_json) + counts.update(load_cir_counts(result_cir)) + strict_policies = "" + if side["mode"] == "strict-name": + strict_policies = "name" + elif side["mode"] == "strict-cms-der": + strict_policies = "cms-der" + elif side["mode"] == "strict-signed-attrs": + strict_policies = "signed-attrs" + elif side["mode"] == "strict-all": + strict_policies = "all" + + meta_path = local_exp_root / side_label / step / "run-meta.json" + generate_meta_args = [ + sys.executable, + str(FEATURE_BUNDLE), + "run-meta", + 
"--out", + str(meta_path), + "--repo-root", + str(local_exp_root), + "--experiment-id", + exp_id, + "--side", + "left" if side_label == "A" else "right", + "--side-label", + side_label, + "--step", + step, + "--run-id", + f"{side_label}-{step}", + "--rp-kind", + side["rpKind"], + "--rp-binary", + "bin/rpki" if side["rpKind"] == "ours" else "bin/rpki-client", + "--rp-version", + "portable-9.8" if side["rpKind"] == "rpki-client" else "ours-dev", + "--rp-git-commit", + git_commit(REPO_ROOT) or "", + "--rp-mode", + side["mode"], + "--protocol-mode", + side["protocol"], + "--strict-policies", + strict_policies, + "--rirs", + ",".join(rirs), + "--argv-json", + json.dumps([]), + "--env-json", + json.dumps({}), + "--cwd", + str(local_exp_root), + "--state-root", + str(local_exp_root / side_label / "state"), + "--db", + str(local_exp_root / side_label / "state" / "ours" / "work-db"), + "--repo-bytes-db", + str(local_exp_root / side_label / "state" / "ours" / "repo-bytes.db"), + "--raw-store-db", + str(local_exp_root / side_label / "state" / "ours" / "raw-store.db"), + "--rsync-mirror-root", + str(local_exp_root / side_label / "state" / "ours" / "rsync-mirror"), + "--cache-root", + str(local_exp_root / side_label / "state" / "rpki-client" / "cache"), + "--ccr", + str(result_ccr), + "--cir", + str(result_cir), + "--fixture-proof", + str(fixture_proof), + "--fixture-proof-summary-json", + json.dumps( + { + "taFixturePinned": True, + "taOnlineFetchObserved": False, + "trustAnchorCount": len(rirs), + } + ), + "--report-json", + str(report_json), + "--stage-timing-json", + str(stage_timing_json), + "--stdout-log", + str(stdout_log), + "--stderr-log", + str(stderr_log), + "--process-time", + str(process_time), + "--exit-code", + str(exit_code), + "--wall-ms", + str(int(time_info.get("wallMs", 0))), + "--max-rss-kb", + str(int(time_info.get("maxRssKb", 0))), + "--vrps", + str(int(counts.get("vrps", 0))), + "--vaps", + str(int(counts.get("aspas", 0))), + "--publication-points", + str(int(counts.get("publicationPoints", counts.get("repositories", 0)))), + "--warnings", + str(int(counts.get("warnings", 0))), + "--cir-object-count", + str(int(counts.get("cirObjectCount", 0))), + "--cir-reject-count", + str(int(counts.get("cirRejectCount", 0))), + "--cir-trust-anchor-count", + str(int(counts.get("cirTrustAnchorCount", len(rirs)))), + "--host", + os.uname().nodename, + "--platform", + sys.platform, + ] + if step == "snapshot": + insert_at = generate_meta_args.index("--state-root") + generate_meta_args.insert(insert_at, "--reset-before-run") + run_local(generate_meta_args) + return meta_path + + +def run_experiment( + ssh_target: str, + local_run_root: Path, + remote_root: Path, + exp: dict[str, Any], + rirs: list[str], + dry_run: bool = False, +) -> dict[str, Any]: + exp_id = exp["id"] + local_exp_root = local_run_root / "experiments" / exp_id + remote_exp_root = remote_root / "experiments" / exp_id + ensure_dir(local_exp_root) + if dry_run: + return render_experiment_plan(exp, local_run_root, remote_root, ssh_target, rirs) + + fixture_proof = build_fixture_proof(local_exp_root, rirs) + preflight = ( + "set -euo pipefail; " + f"df -h /data / || true; " + "systemctl disable --now rpki-client.timer >/dev/null 2>&1 || true; " + "systemctl stop rpki-client.service >/dev/null 2>&1 || true; " + "pkill -f '[/]rpki-client([[:space:]]|$)' >/dev/null 2>&1 || true; " + "pkill -f '[/]routinator([[:space:]]|$)' >/dev/null 2>&1 || true; " + "id -u _rpki-client >/dev/null 2>&1 || useradd -r -M -s /usr/sbin/nologin _rpki-client 
|| true; " + f"mkdir -p {shlex.quote(str(remote_root / 'bin'))} {shlex.quote(str(remote_root / 'lib'))} {shlex.quote(str(remote_root / 'fixtures' / 'tal'))} {shlex.quote(str(remote_root / 'fixtures' / 'ta'))} {shlex.quote(str(remote_exp_root))}" + ) + ssh_script(ssh_target, preflight) + rsync_dir_to_remote(ssh_target, REPO_ROOT / "tests" / "fixtures" / "tal", remote_root / "fixtures" / "tal") + rsync_dir_to_remote(ssh_target, REPO_ROOT / "tests" / "fixtures" / "ta", remote_root / "fixtures" / "ta") + + if exp["left"]["rpKind"] == "rpki-client" or exp["right"]["rpKind"] == "rpki-client": + rsync_to_remote(ssh_target, detect_libtls_path(PORTABLE_ROOT / "src" / "rpki-client"), str(remote_root / "lib" / "libtls.so.28")) + rsync_to_remote(ssh_target, PORTABLE_ROOT / "src" / "rpki-client", str(remote_root / "bin" / "rpki-client")) + rsync_to_remote(ssh_target, REPO_ROOT / "target" / "release" / "rpki", str(remote_root / "bin" / "rpki")) + rsync_to_remote(ssh_target, REPO_ROOT / "target" / "release" / "triage_ccr_cir_pair", str(remote_root / "bin" / "triage_ccr_cir_pair")) + rsync_to_remote(ssh_target, REPO_ROOT / "target" / "release" / "cir_dump_reject_list", str(remote_root / "bin" / "cir_dump_reject_list")) + + step_results: list[dict[str, Any]] = [] + for step in ("snapshot", "delta"): + for side_label, side in (("A", exp["left"]), ("B", exp["right"])): + run_remote_step(ssh_target, remote_root, exp_id, side_label, step, side, rirs) + local_step_root = local_exp_root / side_label / step + remote_step_root = remote_exp_root / side_label / step + rsync_from_remote(ssh_target, str(remote_step_root), local_step_root) + if side["rpKind"] == "rpki-client": + copy_rpki_client_outputs(local_step_root) + write_rpki_client_stage_timing(local_step_root) + # ensure ours has a stable stage timing artifact too + if not (local_step_root / "stage-timing.json").is_file() and (local_step_root / "report.json").is_file(): + report = load_json(local_step_root / "report.json") + stage_timing = { + "tool": side["rpKind"], + "counts": { + "publicationPoints": len(report.get("publication_points", [])), + "vrps": len(report.get("vrps", [])), + "aspas": len(report.get("aspas", [])), + }, + } + write_json(local_step_root / "stage-timing.json", stage_timing) + + meta_path = generate_run_meta( + local_exp_root, + exp_id, + side_label, + step, + side, + rirs, + fixture_proof, + local_step_root, + ) + step_results.append( + { + "side": side_label, + "step": step, + "runDir": str(local_step_root), + "meta": str(meta_path), + } + ) + + compare_dir = local_exp_root / "compare" / step + ensure_dir(compare_dir) + triage_cmd = [ + str(REPO_ROOT / "target" / "release" / "triage_ccr_cir_pair"), + "--left-ccr", + str(local_exp_root / "A" / step / "result.ccr"), + "--left-cir", + str(local_exp_root / "A" / step / "result.cir"), + "--left-meta", + str(local_exp_root / "A" / step / "run-meta.json"), + "--right-ccr", + str(local_exp_root / "B" / step / "result.ccr"), + "--right-cir", + str(local_exp_root / "B" / step / "result.cir"), + "--right-meta", + str(local_exp_root / "B" / step / "run-meta.json"), + "--out-dir", + str(compare_dir), + "--sample-limit", + "200", + "--compare-view-trust-anchor", + compare_view_trust_anchor(rirs), + ] + run_local(triage_cmd, cwd=local_exp_root) + + step_summary = { + "step": step, + "compareDir": str(compare_dir), + "triage": load_json(compare_dir / "triage.json"), + } + write_json(local_exp_root / "compare" / f"{step}.summary.json", step_summary) + step_results.append(step_summary) + + 
experiment_summary = { + "schemaVersion": 1, + "experimentId": exp_id, + "left": exp["left"], + "right": exp["right"], + "rirs": rirs, + "steps": step_results, + } + write_json(local_exp_root / "experiment-summary.json", experiment_summary) + with (local_exp_root / "experiment-summary.md").open("w", encoding="utf-8") as handle: + handle.write(f"# {exp_id}\n\n") + handle.write(json.dumps(experiment_summary, indent=2, ensure_ascii=False)) + handle.write("\n") + + rsync_dir_to_remote(ssh_target, local_exp_root, str(remote_exp_root)) + return experiment_summary + + +def is_rpki_client_experiment(exp: dict[str, Any]) -> bool: + return exp["left"]["rpKind"] == "rpki-client" or exp["right"]["rpKind"] == "rpki-client" + + +def main() -> None: + parser = argparse.ArgumentParser(description="Feature #035 experiment driver") + parser.add_argument("--run-root", required=True) + parser.add_argument("--remote-root", required=True) + parser.add_argument("--ssh-target", default=os.environ.get("SSH_TARGET", "root@47.251.56.108")) + parser.add_argument("--experiment", action="append", help="Experiment id to run; repeatable") + parser.add_argument("--all", action="store_true", help="Run all experiments from experiments.json") + parser.add_argument("--dry-run", action="store_true") + parser.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe") + args = parser.parse_args() + + experiments_doc = load_json(EXPERIMENTS_PATH) + experiments = experiments_doc["experiments"] + selected_ids = set(args.experiment or []) + if args.all or not selected_ids: + selected = experiments + else: + selected = [exp for exp in experiments if exp["id"] in selected_ids] + if not selected: + raise SystemExit("no experiments selected") + + rirs = [item.strip() for item in args.rirs.split(",") if item.strip()] + if not rirs: + raise SystemExit("--rirs must not be empty") + + run_root = Path(args.run_root).resolve() + remote_root = Path(args.remote_root) + ensure_dir(run_root) + if not args.dry_run: + build_tool_binaries() + + if args.dry_run: + plans = [ + run_experiment(args.ssh_target, run_root, remote_root, exp, rirs, dry_run=True) + for exp in selected + ] + print(json.dumps({"schemaVersion": 1, "dryRun": True, "experiments": plans}, indent=2, ensure_ascii=False)) + return + + summary = { + "schemaVersion": 1, + "generatedAtUtc": utc_stamp(), + "runRoot": str(run_root), + "remoteRoot": str(remote_root), + "sshTarget": args.ssh_target, + "experiments": [], + } + for exp in selected: + summary["experiments"].append( + run_experiment(args.ssh_target, run_root, remote_root, exp, rirs, dry_run=False) + ) + write_json(run_root / "feature035-summary.json", summary) + print(json.dumps(summary, indent=2, ensure_ascii=False)) + + +if __name__ == "__main__": + main() diff --git a/src/bin/rpki_object_parse.rs b/src/bin/rpki_object_parse.rs new file mode 100644 index 0000000..1a75207 --- /dev/null +++ b/src/bin/rpki_object_parse.rs @@ -0,0 +1,595 @@ +use std::net::{Ipv4Addr, Ipv6Addr}; +use std::path::{Path, PathBuf}; + +use rpki::data_model::aspa::AspaObject; +use rpki::data_model::crl::RpkixCrl; +use rpki::data_model::manifest::ManifestObject; +use rpki::data_model::rc::{ + AccessDescription, RcExtensions, ResourceCertificate, SubjectInfoAccess, +}; +use rpki::data_model::roa::{IpPrefix as RoaIpPrefix, RoaAfi, RoaObject}; +use rpki::data_model::signed_object::{ + ResourceEeCertificate, RpkiSignedObject, SignedAttrsProfiled, SignerInfoProfiled, +}; +use rpki::data_model::ta::TaCertificate; +use serde_json::{Value, json}; +use 
sha2::{Digest, Sha256}; +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum ObjectType { + Auto, + Cer, + Mft, + Crl, + Roa, + Aspa, +} + +#[derive(Debug, PartialEq, Eq)] +struct Args { + object_type: ObjectType, + input_path: Option<PathBuf>, + out_path: Option<PathBuf>, + pretty: bool, + entry_limit: usize, +} + +impl Default for Args { + fn default() -> Self { + Self { + object_type: ObjectType::Auto, + input_path: None, + out_path: None, + pretty: true, + entry_limit: 50, + } + } +} + +fn usage() -> &'static str { + "Usage: rpki_object_parse --type auto|cer|mft|crl|roa|asa|aspa --input <path> [--out <path>] [--entry-limit <n>] [--compact]" +} + +fn main() { + if let Err(err) = real_main() { + eprintln!("{err}"); + std::process::exit(1); + } +} + +fn real_main() -> Result<(), String> { + let args = parse_args(&std::env::args().collect::<Vec<String>>())?; + let input_path = args.input_path.as_ref().expect("validated"); + let bytes = std::fs::read(input_path) + .map_err(|e| format!("read input failed: {}: {e}", input_path.display()))?; + let object_type = resolve_object_type(args.object_type, input_path)?; + let parsed = parse_object_json(object_type, input_path, &bytes, args.entry_limit); + let rendered = if args.pretty { + serde_json::to_string_pretty(&parsed).map_err(|e| e.to_string())? + } else { + serde_json::to_string(&parsed).map_err(|e| e.to_string())? + }; + + if let Some(out_path) = args.out_path.as_ref() { + if let Some(parent) = out_path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("create output parent failed: {}: {e}", parent.display()))?; + } + std::fs::write(out_path, rendered) + .map_err(|e| format!("write output failed: {}: {e}", out_path.display()))?; + } else { + println!("{rendered}"); + } + Ok(()) +} + +fn parse_args(argv: &[String]) -> Result<Args, String> { + let mut args = Args::default(); + let mut index = 1usize; + while index < argv.len() { + match argv[index].as_str() { + "--type" => { + index += 1; + let value = argv.get(index).ok_or("--type requires a value")?; + args.object_type = parse_object_type(value)?; + } + "--input" | "--in" => { + index += 1; + args.input_path = Some(PathBuf::from( + argv.get(index).ok_or("--input requires a value")?, + )); + } + "--out" => { + index += 1; + args.out_path = Some(PathBuf::from( + argv.get(index).ok_or("--out requires a value")?, + )); + } + "--entry-limit" => { + index += 1; + let value = argv.get(index).ok_or("--entry-limit requires a value")?; + args.entry_limit = parse_limit(value)?; + } + "--all" => { + args.entry_limit = usize::MAX; + } + "--compact" => { + args.pretty = false; + } + "--pretty" => { + args.pretty = true; + } + "-h" | "--help" => return Err(usage().to_string()), + other => return Err(format!("unknown argument: {other}\n{}", usage())), + } + index += 1; + } + if args.input_path.is_none() { + return Err(format!("--input is required\n{}", usage())); + } + Ok(args) +} + +fn parse_object_type(value: &str) -> Result<ObjectType, String> { + match value.to_ascii_lowercase().as_str() { + "auto" => Ok(ObjectType::Auto), + "cer" | ".cer" | "cert" | "certificate" => Ok(ObjectType::Cer), + "mft" | ".mft" | "manifest" => Ok(ObjectType::Mft), + "crl" | ".crl" => Ok(ObjectType::Crl), + "roa" | ".roa" => Ok(ObjectType::Roa), + "asa" | ".asa" | "aspa" => Ok(ObjectType::Aspa), + _ => Err(format!("unsupported --type: {value}\n{}", usage())), + } +} + +fn parse_limit(value: &str) -> Result<usize, String> { + if value.eq_ignore_ascii_case("all") { + return Ok(usize::MAX); + } + value + .parse::<usize>() + .map_err(|_| format!("invalid --entry-limit: {value}")) +} + +fn
resolve_object_type(object_type: ObjectType, path: &Path) -> Result<ObjectType, String> { + if object_type != ObjectType::Auto { + return Ok(object_type); + } + match path + .extension() + .and_then(|v| v.to_str()) + .map(|v| v.to_ascii_lowercase()) + .as_deref() + { + Some("cer") => Ok(ObjectType::Cer), + Some("mft") => Ok(ObjectType::Mft), + Some("crl") => Ok(ObjectType::Crl), + Some("roa") => Ok(ObjectType::Roa), + Some("asa") | Some("aspa") => Ok(ObjectType::Aspa), + _ => Err(format!( + "cannot infer object type from path: {}", + path.display() + )), + } +} + +fn parse_object_json( + object_type: ObjectType, + input_path: &Path, + bytes: &[u8], + entry_limit: usize, +) -> Value { + let object = match object_type { + ObjectType::Auto => unreachable!("auto must be resolved"), + ObjectType::Cer => parse_cer_json(bytes), + ObjectType::Mft => parse_mft_json(bytes, entry_limit), + ObjectType::Crl => parse_crl_json(bytes, entry_limit), + ObjectType::Roa => parse_roa_json(bytes, entry_limit), + ObjectType::Aspa => parse_aspa_json(bytes, entry_limit), + }; + json!({ + "tool": "rpki_object_parse", + "schemaVersion": 1, + "input": { + "path": input_path.display().to_string(), + "type": object_type_label(object_type), + "bytes": bytes_summary(bytes), + }, + "object": object, + }) +} + +fn parse_cer_json(bytes: &[u8]) -> Value { + match ResourceCertificate::decode_der(bytes) { + Ok(cert) => { + let ta_profile = match TaCertificate::decode_der(bytes) { + Ok(ta) => json!({ + "valid": true, + "selfSignature": result_json(ta.verify_self_signature().map_err(|e| e.to_string())), + }), + Err(err) => json!({ + "valid": false, + "error": err.to_string(), + }), + }; + json!({ + "type": "cer", + "decode": {"profileValid": true}, + "resourceCertificate": resource_certificate_json(&cert), + "trustAnchorProfile": ta_profile, + }) + } + Err(err) => json!({ + "type": "cer", + "decode": {"profileValid": false, "error": err.to_string()}, + }), + } +} + +fn parse_mft_json(bytes: &[u8], entry_limit: usize) -> Value { + match ManifestObject::decode_der(bytes) { + Ok(mft) => { + let files = mft.manifest.parse_files(); + let (file_sample, file_list_error) = match files { + Ok(entries) => ( + json!({ + "count": entries.len(), + "truncated": entries.len() > entry_limit, + "entries": entries.iter().take(entry_limit).map(|item| { + json!({"fileName": item.file_name, "hashHex": hex::encode(item.hash_bytes)}) + }).collect::<Vec<_>>(), + }), + Value::Null, + ), + Err(err) => (Value::Null, json!(err.to_string())), + }; + json!({ + "type": "mft", + "decode": {"profileValid": true}, + "eContentType": mft.econtent_type, + "signedObject": signed_object_json(&mft.signed_object), + "manifest": { + "version": mft.manifest.version, + "manifestNumberHex": mft.manifest.manifest_number.to_hex_upper(), + "thisUpdate": format_time(mft.manifest.this_update), + "nextUpdate": format_time(mft.manifest.next_update), + "fileHashAlg": mft.manifest.file_hash_alg, + "fileCount": mft.manifest.file_count(), + "fileList": file_sample, + "fileListError": file_list_error, + }, + "embeddedEeProfile": result_json(mft.validate_embedded_ee_cert().map_err(|e| e.to_string())), + "cmsSignature": result_json(mft.signed_object.verify_signature().map_err(|e| e.to_string())), + }) + } + Err(err) => json!({ + "type": "mft", + "decode": {"profileValid": false, "error": err.to_string()}, + }), + } +} + +fn parse_crl_json(bytes: &[u8], entry_limit: usize) -> Value { + match RpkixCrl::decode_der(bytes) { + Ok(crl) => json!({ + "type": "crl", + "decode": {"profileValid": true}, + "rawDer":
bytes_summary(&crl.raw_der), + "version": crl.version, + "issuer": crl.issuer_dn, + "signatureAlgorithm": crl.signature_algorithm_oid, + "thisUpdate": format_time(crl.this_update.utc), + "nextUpdate": format_time(crl.next_update.utc), + "extensions": { + "authorityKeyIdentifier": hex::encode(&crl.extensions.authority_key_identifier), + "crlNumberHex": crl.extensions.crl_number.to_hex_upper(), + "crlNumber": crl.extensions.crl_number.to_u64(), + }, + "revokedCertificates": { + "count": crl.revoked_certs.len(), + "truncated": crl.revoked_certs.len() > entry_limit, + "entries": crl.revoked_certs.iter().take(entry_limit).map(|item| { + json!({ + "serialNumberHex": item.serial_number.to_hex_upper(), + "serialNumber": item.serial_number.to_u64(), + "revocationDate": format_time(item.revocation_date.utc), + }) + }).collect::>(), + }, + }), + Err(err) => json!({ + "type": "crl", + "decode": {"profileValid": false, "error": err.to_string()}, + }), + } +} + +fn parse_roa_json(bytes: &[u8], entry_limit: usize) -> Value { + match RoaObject::decode_der(bytes) { + Ok(roa) => json!({ + "type": "roa", + "decode": {"profileValid": true}, + "eContentType": roa.econtent_type, + "signedObject": signed_object_json(&roa.signed_object), + "roa": { + "version": roa.roa.version, + "asId": roa.roa.as_id, + "ipAddressFamilies": roa.roa.ip_addr_blocks.iter().map(|family| { + json!({ + "afi": format!("{:?}", family.afi), + "addressCount": family.addresses.len(), + "truncated": family.addresses.len() > entry_limit, + "addresses": family.addresses.iter().take(entry_limit).map(|entry| { + json!({ + "prefix": roa_prefix_string(&entry.prefix), + "maxLength": entry.max_length, + }) + }).collect::>(), + }) + }).collect::>(), + }, + "embeddedEeProfile": result_json(roa.validate_embedded_ee_cert().map_err(|e| e.to_string())), + "cmsSignature": result_json(roa.signed_object.verify_signature().map_err(|e| e.to_string())), + }), + Err(err) => json!({ + "type": "roa", + "decode": {"profileValid": false, "error": err.to_string()}, + }), + } +} + +fn parse_aspa_json(bytes: &[u8], entry_limit: usize) -> Value { + match AspaObject::decode_der(bytes) { + Ok(aspa) => json!({ + "type": "aspa", + "decode": {"profileValid": true}, + "eContentType": aspa.econtent_type, + "signedObject": signed_object_json(&aspa.signed_object), + "aspa": { + "version": aspa.aspa.version, + "customerAsId": aspa.aspa.customer_as_id, + "providerCount": aspa.aspa.provider_as_ids.len(), + "providersTruncated": aspa.aspa.provider_as_ids.len() > entry_limit, + "providerAsIds": aspa.aspa.provider_as_ids.iter().take(entry_limit).copied().collect::>(), + }, + "embeddedEeProfile": result_json(aspa.validate_embedded_ee_cert().map_err(|e| e.to_string())), + "cmsSignature": result_json(aspa.signed_object.verify_signature().map_err(|e| e.to_string())), + }), + Err(err) => json!({ + "type": "aspa", + "decode": {"profileValid": false, "error": err.to_string()}, + }), + } +} + +fn resource_certificate_json(cert: &ResourceCertificate) -> Value { + let tbs = &cert.tbs; + json!({ + "rawDer": bytes_summary(&cert.raw_der), + "kind": format!("{:?}", cert.kind), + "version": tbs.version, + "serialNumberHex": hex::encode(tbs.serial_number.to_bytes_be()), + "signatureAlgorithm": tbs.signature_algorithm, + "issuer": tbs.issuer_name.to_string(), + "subject": tbs.subject_name.to_string(), + "validity": { + "notBefore": format_time(tbs.validity_not_before), + "notAfter": format_time(tbs.validity_not_after), + }, + "subjectPublicKeyInfo": bytes_summary(&tbs.subject_public_key_info), + 
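+        // bytes_summary condenses raw DER blobs to their length, SHA-256, and 16-byte head/tail hex rather than emitting full hex dumps.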
"extensions": rc_extensions_json(&tbs.extensions), + }) +} + +fn rc_extensions_json(ext: &RcExtensions) -> Value { + json!({ + "basicConstraintsCa": ext.basic_constraints_ca, + "subjectKeyIdentifier": ext.subject_key_identifier.as_ref().map(|v| hex::encode(v)), + "authorityKeyIdentifier": ext.authority_key_identifier.as_ref().map(|v| hex::encode(v)), + "crlDistributionPointsUris": ext.crl_distribution_points_uris, + "caIssuersUris": ext.ca_issuers_uris, + "subjectInfoAccess": subject_info_access_json(ext.subject_info_access.as_ref()), + "certificatePoliciesOid": ext.certificate_policies_oid, + "ipResources": serde_json::to_value(&ext.ip_resources).unwrap_or(Value::Null), + "asResources": serde_json::to_value(&ext.as_resources).unwrap_or(Value::Null), + }) +} + +fn subject_info_access_json(value: Option<&SubjectInfoAccess>) -> Value { + match value { + None => Value::Null, + Some(SubjectInfoAccess::Ca(ca)) => json!({ + "kind": "ca", + "accessDescriptions": ca.access_descriptions.iter().map(access_description_json).collect::>(), + }), + Some(SubjectInfoAccess::Ee(ee)) => json!({ + "kind": "ee", + "signedObjectUris": ee.signed_object_uris, + "accessDescriptions": ee.access_descriptions.iter().map(access_description_json).collect::>(), + }), + } +} + +fn access_description_json(value: &AccessDescription) -> Value { + json!({ + "accessMethodOid": value.access_method_oid, + "accessLocation": value.access_location, + }) +} + +fn signed_object_json(signed_object: &RpkiSignedObject) -> Value { + let signed_data = &signed_object.signed_data; + json!({ + "rawDer": bytes_summary(&signed_object.raw_der), + "contentInfoContentType": signed_object.content_info_content_type, + "signedData": { + "version": signed_data.version, + "digestAlgorithms": signed_data.digest_algorithms, + "encapContentInfo": { + "eContentType": signed_data.encap_content_info.econtent_type, + "eContent": bytes_summary(&signed_data.encap_content_info.econtent), + }, + "certificates": signed_data.certificates.iter().map(ee_certificate_json).collect::>(), + "crlsPresent": signed_data.crls_present, + "signerInfos": signed_data.signer_infos.iter().map(signer_info_json).collect::>(), + }, + }) +} + +fn ee_certificate_json(cert: &ResourceEeCertificate) -> Value { + json!({ + "rawDer": bytes_summary(&cert.raw_der), + "subjectKeyIdentifier": hex::encode(&cert.subject_key_identifier), + "spkiDer": bytes_summary(&cert.spki_der), + "rsaPublicKey": { + "modulus": bytes_summary(&cert.rsa_public_modulus), + "exponent": bytes_summary(&cert.rsa_public_exponent), + }, + "tbsCertificate": bytes_summary(&cert.tbs_certificate_der), + "certificateSignature": bytes_summary(&cert.signature_bytes), + "keyUsageSummary": format!("{:?}", cert.key_usage_summary), + "siaSignedObjectUris": cert.sia_signed_object_uris, + "resourceCertificate": resource_certificate_json(&cert.resource_cert), + }) +} + +fn signer_info_json(info: &SignerInfoProfiled) -> Value { + json!({ + "version": info.version, + "sidSki": hex::encode(&info.sid_ski), + "digestAlgorithm": info.digest_algorithm, + "signatureAlgorithm": info.signature_algorithm, + "signedAttrs": signed_attrs_json(&info.signed_attrs), + "unsignedAttrsPresent": info.unsigned_attrs_present, + "signature": bytes_summary(&info.signature), + "signedAttrsDerForSignature": bytes_summary(&info.signed_attrs_der_for_signature), + }) +} + +fn signed_attrs_json(attrs: &SignedAttrsProfiled) -> Value { + json!({ + "contentType": attrs.content_type, + "messageDigest": hex::encode(&attrs.message_digest), + "signingTime": { + "utc": 
format_time(attrs.signing_time.utc), + "encoding": format!("{:?}", attrs.signing_time.encoding), + }, + "otherAttrsPresent": attrs.other_attrs_present, + }) +} + +fn result_json(result: Result<(), String>) -> Value { + match result { + Ok(()) => json!({"valid": true}), + Err(err) => json!({"valid": false, "error": err}), + } +} + +fn object_type_label(object_type: ObjectType) -> &'static str { + match object_type { + ObjectType::Auto => "auto", + ObjectType::Cer => "cer", + ObjectType::Mft => "mft", + ObjectType::Crl => "crl", + ObjectType::Roa => "roa", + ObjectType::Aspa => "aspa", + } +} + +fn bytes_summary(bytes: &[u8]) -> Value { + let head_len = bytes.len().min(16); + let tail_len = bytes.len().min(16); + json!({ + "len": bytes.len(), + "sha256": sha256_hex(bytes), + "headHex": hex::encode(&bytes[..head_len]), + "tailHex": hex::encode(&bytes[bytes.len().saturating_sub(tail_len)..]), + }) +} + +fn sha256_hex(bytes: &[u8]) -> String { + hex::encode(Sha256::digest(bytes)) +} + +fn format_time(value: time::OffsetDateTime) -> String { + value + .to_offset(time::UtcOffset::UTC) + .format(&time::format_description::well_known::Rfc3339) + .unwrap_or_else(|_| value.unix_timestamp().to_string()) +} + +fn roa_prefix_string(prefix: &RoaIpPrefix) -> String { + let bytes = prefix.addr_bytes(); + match prefix.afi { + RoaAfi::Ipv4 => { + let octets = [bytes[0], bytes[1], bytes[2], bytes[3]]; + format!("{}/{}", Ipv4Addr::from(octets), prefix.prefix_len) + } + RoaAfi::Ipv6 => { + let mut octets = [0u8; 16]; + octets.copy_from_slice(bytes); + format!("{}/{}", Ipv6Addr::from(octets), prefix.prefix_len) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_args_accepts_plan_shape() { + let args = parse_args(&[ + "rpki_object_parse".to_string(), + "--type".to_string(), + "auto".to_string(), + "--input".to_string(), + "a.roa".to_string(), + "--out".to_string(), + "parsed.json".to_string(), + "--entry-limit".to_string(), + "5".to_string(), + ]) + .expect("parse args"); + assert_eq!(args.object_type, ObjectType::Auto); + assert_eq!(args.input_path.as_deref(), Some(Path::new("a.roa"))); + assert_eq!(args.out_path.as_deref(), Some(Path::new("parsed.json"))); + assert_eq!(args.entry_limit, 5); + } + + #[test] + fn parse_args_accepts_aspa_alias_and_all_limit() { + let args = parse_args(&[ + "rpki_object_parse".to_string(), + "--type".to_string(), + "asa".to_string(), + "--in".to_string(), + "a.asa".to_string(), + "--entry-limit".to_string(), + "all".to_string(), + "--compact".to_string(), + ]) + .expect("parse args"); + assert_eq!(args.object_type, ObjectType::Aspa); + assert_eq!(args.entry_limit, usize::MAX); + assert!(!args.pretty); + } + + #[test] + fn resolve_auto_type_from_extension() { + assert_eq!( + resolve_object_type(ObjectType::Auto, Path::new("a.mft")).expect("resolve"), + ObjectType::Mft + ); + assert_eq!( + resolve_object_type(ObjectType::Auto, Path::new("a.asa")).expect("resolve"), + ObjectType::Aspa + ); + } + + #[test] + fn unknown_auto_type_is_rejected() { + let err = resolve_object_type(ObjectType::Auto, Path::new("a.bin")).unwrap_err(); + assert!(err.contains("cannot infer object type"), "{err}"); + } +} diff --git a/src/bin/triage_ccr_cir_pair.rs b/src/bin/triage_ccr_cir_pair.rs new file mode 100644 index 0000000..f98e116 --- /dev/null +++ b/src/bin/triage_ccr_cir_pair.rs @@ -0,0 +1,1412 @@ +use std::collections::{BTreeMap, BTreeSet}; +use std::path::{Path, PathBuf}; + +use rpki::ccr::{compare_state_digests, decode_content_info}; +use rpki::cir::decode_cir; +use 
serde_json::{Value, json}; + +#[derive(Debug, PartialEq, Eq)] +struct Args { + left_ccr: PathBuf, + left_cir: PathBuf, + left_meta: PathBuf, + right_ccr: PathBuf, + right_cir: PathBuf, + right_meta: PathBuf, + out_dir: PathBuf, + sample_limit: usize, + compare_view_trust_anchor: String, +} + +fn usage() -> &'static str { + "Usage: triage_ccr_cir_pair --left-ccr --left-cir --left-meta --right-ccr --right-cir --right-meta --out-dir [--sample-limit ] [--compare-view-trust-anchor ]" +} + +fn main() { + if let Err(err) = real_main() { + eprintln!("{err}"); + std::process::exit(1); + } +} + +fn real_main() -> Result<(), String> { + let args = parse_args(&std::env::args().collect::>())?; + run(args) +} + +fn parse_args(argv: &[String]) -> Result { + let mut left_ccr = None; + let mut left_cir = None; + let mut left_meta = None; + let mut right_ccr = None; + let mut right_cir = None; + let mut right_meta = None; + let mut out_dir = None; + let mut sample_limit = 200usize; + let mut compare_view_trust_anchor = "unknown".to_string(); + let mut index = 1usize; + while index < argv.len() { + match argv[index].as_str() { + "--left-ccr" => { + index += 1; + left_ccr = Some(PathBuf::from( + argv.get(index).ok_or("--left-ccr requires a value")?, + )); + } + "--left-cir" => { + index += 1; + left_cir = Some(PathBuf::from( + argv.get(index).ok_or("--left-cir requires a value")?, + )); + } + "--left-meta" => { + index += 1; + left_meta = Some(PathBuf::from( + argv.get(index).ok_or("--left-meta requires a value")?, + )); + } + "--right-ccr" => { + index += 1; + right_ccr = Some(PathBuf::from( + argv.get(index).ok_or("--right-ccr requires a value")?, + )); + } + "--right-cir" => { + index += 1; + right_cir = Some(PathBuf::from( + argv.get(index).ok_or("--right-cir requires a value")?, + )); + } + "--right-meta" => { + index += 1; + right_meta = Some(PathBuf::from( + argv.get(index).ok_or("--right-meta requires a value")?, + )); + } + "--out-dir" => { + index += 1; + out_dir = Some(PathBuf::from( + argv.get(index).ok_or("--out-dir requires a value")?, + )); + } + "--sample-limit" => { + index += 1; + let value = argv.get(index).ok_or("--sample-limit requires a value")?; + sample_limit = value + .parse::() + .map_err(|_| format!("invalid --sample-limit: {value}"))?; + } + "--compare-view-trust-anchor" => { + index += 1; + compare_view_trust_anchor = argv + .get(index) + .ok_or("--compare-view-trust-anchor requires a value")? 
+ .clone(); + } + "-h" | "--help" => return Err(usage().to_string()), + other => return Err(format!("unknown argument: {other}\n{}", usage())), + } + index += 1; + } + Ok(Args { + left_ccr: left_ccr.ok_or_else(|| format!("--left-ccr is required\n{}", usage()))?, + left_cir: left_cir.ok_or_else(|| format!("--left-cir is required\n{}", usage()))?, + left_meta: left_meta.ok_or_else(|| format!("--left-meta is required\n{}", usage()))?, + right_ccr: right_ccr.ok_or_else(|| format!("--right-ccr is required\n{}", usage()))?, + right_cir: right_cir.ok_or_else(|| format!("--right-cir is required\n{}", usage()))?, + right_meta: right_meta.ok_or_else(|| format!("--right-meta is required\n{}", usage()))?, + out_dir: out_dir.ok_or_else(|| format!("--out-dir is required\n{}", usage()))?, + sample_limit, + compare_view_trust_anchor, + }) +} + +fn run(args: Args) -> Result<(), String> { + std::fs::create_dir_all(&args.out_dir) + .map_err(|e| format!("create out-dir failed: {}: {e}", args.out_dir.display()))?; + let left_meta = read_json(&args.left_meta)?; + let right_meta = read_json(&args.right_meta)?; + let ccr_summary = build_ccr_summary(&args)?; + let cir_summary = build_cir_summary(&args, &left_meta, &right_meta)?; + let input_integrity = build_input_integrity(&args, &left_meta, &right_meta); + let diagnosis = diagnose(&ccr_summary, &cir_summary, &input_integrity); + let triage = json!({ + "schemaVersion": 1, + "generatedBy": "triage_ccr_cir_pair", + "left": side_summary("left", &args.left_ccr, &args.left_cir, &args.left_meta, &left_meta), + "right": side_summary("right", &args.right_ccr, &args.right_cir, &args.right_meta, &right_meta), + "sampleLimit": args.sample_limit, + "diagnosis": diagnosis, + "primaryDiagnosis": diagnosis, + "allMatch": diagnosis == "same_state", + "inputIntegrity": input_integrity, + "ccr": ccr_summary, + "cir": cir_summary, + "manualInvestigationHints": manual_hints(&diagnosis), + }); + write_json(&args.out_dir.join("ccr-summary.json"), &triage["ccr"])?; + write_json(&args.out_dir.join("cir-summary.json"), &triage["cir"])?; + write_json(&args.out_dir.join("triage.json"), &triage)?; + write_markdown(&args.out_dir.join("triage.md"), &triage)?; + write_samples_jsonl(&args.out_dir.join("diff-samples.jsonl"), &triage)?; + println!("{}", args.out_dir.display()); + Ok(()) +} + +fn build_ccr_summary(args: &Args) -> Result { + let left = read_file(&args.left_ccr)?; + let right = read_file(&args.right_ccr)?; + let comparison = compare_state_digests(&left, &right).map_err(|e| e.to_string())?; + let state_digest_match = comparison.matches(); + let mismatched_states = comparison.mismatched_state_names(); + let mut mismatched_components = Vec::new(); + if comparison.ours.version != comparison.peer.version { + mismatched_components.push("version".to_string()); + } + if comparison.ours.hash_alg_oid != comparison.peer.hash_alg_oid { + mismatched_components.push("hashAlgorithm".to_string()); + } + mismatched_components.extend(mismatched_states.iter().map(|item| (*item).to_string())); + let fallback = if state_digest_match { + None + } else { + Some(build_compare_view_summary( + &left, + &right, + &args.compare_view_trust_anchor, + args.sample_limit, + )?) 
+ }; + Ok(json!({ + "stateDigestMatch": state_digest_match, + "comparePath": if state_digest_match { + "ccr_state_digest_match" + } else if fallback.as_ref().is_some_and(|item| item.vrps_match && item.vaps_match) { + "ccr_state_digest_mismatch_with_compare_views_match" + } else { + "ccr_state_digest_mismatch_with_set_diff" + }, + "mismatchedStates": mismatched_states, + "mismatchedComponents": mismatched_components, + "versionMatch": comparison.ours.version == comparison.peer.version, + "hashAlgorithmMatch": comparison.ours.hash_alg_oid == comparison.peer.hash_alg_oid, + "left": { + "version": comparison.ours.version, + "hashAlg": comparison.ours.hash_alg_oid, + }, + "right": { + "version": comparison.peer.version, + "hashAlg": comparison.peer.hash_alg_oid, + }, + "states": comparison.states.iter().map(|state| json!({ + "name": state.name, + "match": state.matches, + "leftPresent": state.ours_present, + "rightPresent": state.peer_present, + "leftHash": state.ours_hash_hex, + "rightHash": state.peer_hash_hex, + })).collect::>(), + "compareViews": fallback.map(CompareViewSummary::to_json), + })) +} + +struct CompareViewSummary { + vrps_match: bool, + vaps_match: bool, + json: Value, +} + +impl CompareViewSummary { + fn to_json(self) -> Value { + self.json + } +} + +fn build_compare_view_summary( + left_der: &[u8], + right_der: &[u8], + trust_anchor: &str, + sample_limit: usize, +) -> Result { + let left = decode_content_info(left_der).map_err(|e| e.to_string())?; + let right = decode_content_info(right_der).map_err(|e| e.to_string())?; + let (left_vrps, left_vaps) = + rpki::ccr::decode_ccr_compare_views(&left, trust_anchor).map_err(|e| e.to_string())?; + let (right_vrps, right_vaps) = + rpki::ccr::decode_ccr_compare_views(&right, trust_anchor).map_err(|e| e.to_string())?; + let vrps = compare_sets(&left_vrps, &right_vrps, sample_limit); + let vaps = compare_sets(&left_vaps, &right_vaps, sample_limit); + Ok(CompareViewSummary { + vrps_match: vrps.match_, + vaps_match: vaps.match_, + json: json!({ + "trustAnchor": trust_anchor, + "vrps": vrps.to_json_with(|row| json!([row.asn, row.ip_prefix, row.max_length, row.trust_anchor])), + "vaps": vaps.to_json_with(|row| json!([row.customer_asn, row.providers, row.trust_anchor])), + }), + }) +} + +fn build_cir_summary(args: &Args, left_meta: &Value, right_meta: &Value) -> Result { + let left = decode_cir(&read_file(&args.left_cir)?).map_err(|e| e.to_string())?; + let right = decode_cir(&read_file(&args.right_cir)?).map_err(|e| e.to_string())?; + let left_objects = left + .objects + .iter() + .map(|item| (item.rsync_uri.clone(), hex::encode(&item.sha256))) + .collect::>(); + let right_objects = right + .objects + .iter() + .map(|item| (item.rsync_uri.clone(), hex::encode(&item.sha256))) + .collect::>(); + let left_rejects = left + .rejected_objects + .iter() + .map(|item| item.object_uri.clone()) + .collect::>(); + let right_rejects = right + .rejected_objects + .iter() + .map(|item| item.object_uri.clone()) + .collect::>(); + let left_trust_anchors = left + .trust_anchors + .iter() + .map(|item| { + ( + item.ta_rsync_uri.clone(), + hex::encode(&item.ta_certificate_sha256), + ) + }) + .collect::>(); + let right_trust_anchors = right + .trust_anchors + .iter() + .map(|item| { + ( + item.ta_rsync_uri.clone(), + hex::encode(&item.ta_certificate_sha256), + ) + }) + .collect::>(); + let objects = compare_object_maps(&left_objects, &right_objects, args.sample_limit); + let rejects = compare_sets(&left_rejects, &right_rejects, args.sample_limit); + let 
trust_anchors = + compare_object_maps(&left_trust_anchors, &right_trust_anchors, args.sample_limit); + let left_fixture = fixture_ta_map(left_meta); + let right_fixture = fixture_ta_map(right_meta); + let left_fixture_match = trust_anchor_fixture_match(&left_trust_anchors, &left_fixture); + let right_fixture_match = trust_anchor_fixture_match(&right_trust_anchors, &right_fixture); + let reject_hash_match = left.reject_list_sha256 == right.reject_list_sha256; + Ok(json!({ + "allMatch": objects.match_ && rejects.match_ && trust_anchors.match_ && reject_hash_match, + "objects": objects.to_json(), + "rejects": rejects.to_json_with(|item| json!(item)), + "trustAnchors": trust_anchors.to_json(), + "trustAnchorFixture": { + "leftMatch": left_fixture_match, + "rightMatch": right_fixture_match, + }, + "rejectListSha256Match": reject_hash_match, + "left": { + "objectCount": left.objects.len(), + "rejectCount": left.rejected_objects.len(), + "trustAnchorCount": left.trust_anchors.len(), + "rejectListSha256": hex::encode(&left.reject_list_sha256), + "validationTime": left.validation_time.to_string(), + }, + "right": { + "objectCount": right.objects.len(), + "rejectCount": right.rejected_objects.len(), + "trustAnchorCount": right.trust_anchors.len(), + "rejectListSha256": hex::encode(&right.reject_list_sha256), + "validationTime": right.validation_time.to_string(), + }, + })) +} + +fn diagnose(ccr: &Value, cir: &Value, integrity: &Value) -> String { + if integrity["metadataValid"].as_bool() == Some(false) { + return "metadata_invalid".to_string(); + } + if integrity["runExitOk"].as_bool() == Some(false) { + return "run_failed".to_string(); + } + if integrity["taFixturePinned"].as_bool() == Some(false) { + return "ta_fixture_not_pinned".to_string(); + } + if ccr["stateDigestMatch"].as_bool().unwrap_or(false) + && cir["allMatch"].as_bool().unwrap_or(false) + { + return "same_state".to_string(); + } + if cir["trustAnchorFixture"]["leftMatch"].as_bool() == Some(false) + || cir["trustAnchorFixture"]["rightMatch"].as_bool() == Some(false) + { + return "ta_fixture_not_pinned".to_string(); + } + if cir["trustAnchors"]["match"].as_bool() != Some(true) { + return "trust_anchor_input_difference".to_string(); + } + let object_uri_diff = cir["objects"]["onlyInLeftCount"].as_u64().unwrap_or(0) > 0 + || cir["objects"]["onlyInRightCount"].as_u64().unwrap_or(0) > 0; + if object_uri_diff { + return "sync_input_object_difference".to_string(); + } + if cir["objects"]["hashMismatchCount"].as_u64().unwrap_or(0) > 0 { + return "sync_input_object_content_difference".to_string(); + } + if cir["rejects"]["match"].as_bool() != Some(true) + || cir["rejectListSha256Match"].as_bool() != Some(true) + { + return "validation_reject_policy_difference".to_string(); + } + if ccr["stateDigestMatch"].as_bool().unwrap_or(false) { + return "equivalent_output_process_difference".to_string(); + } + if ccr["compareViews"]["vrps"]["match"].as_bool() == Some(true) + && ccr["compareViews"]["vaps"]["match"].as_bool() == Some(true) + { + return "ccr_projection_or_encoding_difference".to_string(); + } + "unknown_needs_manual_object_analysis".to_string() +} + +fn manual_hints(diagnosis: &str) -> Vec { + match diagnosis { + "same_state" => vec!["CCR state digest matches; no object-level investigation needed.".into()], + "metadata_invalid" => vec![ + "Run metadata is incomplete or artifact paths do not match the explicit CLI inputs.".into(), + "Fix the experiment driver metadata envelope before interpreting RP behavior.".into(), + ], + "run_failed" => 
vec![ + "At least one side reported a non-zero run exit code in run-meta.json.".into(), + "Inspect the registered run logs outside standard triage before comparing behavior.".into(), + ], + "ta_fixture_not_pinned" => vec![ + "At least one side reported that the local TA fixture was not pinned.".into(), + "Fix fixture pinning before interpreting CCR/CIR differences.".into(), + ], + "trust_anchor_input_difference" => vec![ + "Compare CIR TrustAnchor entries and fixture-proof TAL/TA hashes.".into(), + "If fixture hashes differ, fix experiment input before analyzing RP behavior.".into(), + ], + "sync_input_object_difference" => vec![ + "Inspect CIR object URI/hash differences first; this points to sync/manifest consumption differences.".into(), + "Use a separate object parser workflow for selected URI samples if manual root cause analysis is needed.".into(), + ], + "sync_input_object_content_difference" => vec![ + "CIR object URI sets match but at least one object hash differs.".into(), + "For live all5 runs this usually points to repository view drift or sync content differences.".into(), + ], + "validation_reject_policy_difference" => vec![ + "Inspect reject-only URI sets; this points to validation policy or parser strictness differences.".into(), + "Reject reasons are advisory and are not part of reject-list digest comparison.".into(), + ], + "ccr_projection_or_encoding_difference" => vec![ + "CIR inputs match but CCR state differs; inspect CCR projection, ordering, or encoding.".into(), + ], + "equivalent_output_process_difference" => vec![ + "CCR state digest matches but CIR process inputs differ.".into(), + "Record this as a behavior difference even though the final CCR state is equivalent.".into(), + ], + _ => vec![ + "CCR and standard CIR triage cannot fully explain this mismatch.".into(), + "Use manual parser tools on selected objects outside the standard triage workflow.".into(), + ], + } +} + +fn side_summary(label: &str, ccr: &Path, cir: &Path, meta: &Path, meta_json: &Value) -> Value { + json!({ + "label": label, + "ccr": path_string(ccr), + "cir": path_string(cir), + "metadata": path_string(meta), + "experimentId": meta_json.get("experimentId"), + "sideLabel": meta_json.get("sideLabel"), + "step": meta_json.get("step"), + "rp": meta_json.get("rp"), + "scope": meta_json.get("scope"), + }) +} + +fn path_string(path: &Path) -> String { + path.to_string_lossy().into_owned() +} + +fn build_input_integrity(args: &Args, left_meta: &Value, right_meta: &Value) -> Value { + let mut issues = Vec::new(); + validate_side_metadata( + "left", + left_meta, + &args.left_ccr, + &args.left_cir, + &args.left_meta, + &mut issues, + ); + validate_side_metadata( + "right", + right_meta, + &args.right_ccr, + &args.right_cir, + &args.right_meta, + &mut issues, + ); + let left_step = left_meta["step"].as_str(); + let right_step = right_meta["step"].as_str(); + if left_step.is_some() && right_step.is_some() && left_step != right_step { + issues.push(format!( + "left/right step mismatch: left={} right={}", + left_step.unwrap_or("-"), + right_step.unwrap_or("-") + )); + } + let run_exit_ok = meta_exit_code(left_meta) == Some(0) && meta_exit_code(right_meta) == Some(0); + let ta_fixture_pinned = match ( + meta_ta_fixture_pinned(left_meta), + meta_ta_fixture_pinned(right_meta), + ) { + (Some(false), _) | (_, Some(false)) => Some(false), + (Some(true), Some(true)) => Some(true), + _ => None, + }; + json!({ + "metadataValid": issues.is_empty(), + "issues": issues, + "runExitOk": run_exit_ok, + "leftExitCode": 
meta_exit_code(left_meta), + "rightExitCode": meta_exit_code(right_meta), + "sameStep": left_step.is_some() && right_step.is_some() && left_step == right_step, + "step": if left_step == right_step { left_step } else { None }, + "scopeAll5": left_meta["scope"]["all5"].as_bool() == Some(true) + && right_meta["scope"]["all5"].as_bool() == Some(true), + "taFixturePinned": ta_fixture_pinned, + }) +} + +fn validate_side_metadata( + label: &str, + meta: &Value, + ccr: &Path, + cir: &Path, + meta_path: &Path, + issues: &mut Vec, +) { + if meta.get("schemaVersion").is_none() { + issues.push(format!("{label}: missing schemaVersion")); + } + if meta.get("experimentId").is_none() { + issues.push(format!("{label}: missing experimentId")); + } + if meta.get("sideLabel").is_none() { + issues.push(format!("{label}: missing sideLabel")); + } + if meta.get("step").is_none() { + issues.push(format!("{label}: missing step")); + } + if meta.get("scope").is_none() { + issues.push(format!("{label}: missing scope")); + } + validate_artifact_path(label, meta, "ccr", ccr, issues); + validate_artifact_path(label, meta, "cir", cir, issues); + validate_artifact_path(label, meta, "runMeta", meta_path, issues); +} + +fn validate_artifact_path( + label: &str, + meta: &Value, + field: &str, + actual: &Path, + issues: &mut Vec, +) { + let Some(recorded) = meta["artifacts"][field].as_str() else { + issues.push(format!("{label}: missing artifacts.{field}")); + return; + }; + if !paths_match(recorded, actual) { + issues.push(format!( + "{label}: artifacts.{field} does not match CLI input: meta={} cli={}", + recorded, + actual.display() + )); + } +} + +fn paths_match(recorded: &str, actual: &Path) -> bool { + let recorded_path = Path::new(recorded); + if recorded_path == actual || recorded == actual.to_string_lossy() { + return true; + } + if !recorded_path.is_absolute() + && let Ok(actual_real) = std::fs::canonicalize(actual) + { + let recorded_normalized = normalize_relative_path(recorded_path); + if actual_real.ends_with(&recorded_normalized) { + return true; + } + } + match ( + std::fs::canonicalize(recorded_path), + std::fs::canonicalize(actual), + ) { + (Ok(recorded_real), Ok(actual_real)) => recorded_real == actual_real, + _ => false, + } +} + +fn normalize_relative_path(path: &Path) -> PathBuf { + let mut normalized = PathBuf::new(); + for component in path.components() { + match component { + std::path::Component::CurDir => {} + std::path::Component::ParentDir => normalized.push(".."), + std::path::Component::Normal(part) => normalized.push(part), + std::path::Component::RootDir | std::path::Component::Prefix(_) => {} + } + } + normalized +} + +fn meta_exit_code(meta: &Value) -> Option { + meta["metrics"]["exitCode"].as_i64() +} + +fn meta_ta_fixture_pinned(meta: &Value) -> Option { + meta.pointer("/fixtureProof/taFixturePinned") + .and_then(Value::as_bool) + .or_else(|| { + meta.pointer("/fixtureProofSummary/taFixturePinned") + .and_then(Value::as_bool) + }) +} + +fn fixture_ta_map(meta: &Value) -> BTreeMap { + meta.pointer("/fixtureProof/trustAnchors") + .and_then(Value::as_array) + .into_iter() + .flatten() + .filter_map(|item| { + Some(( + item.get("taRsyncUri")?.as_str()?.to_string(), + item.get("taCertificateSha256")? + .as_str()? 
+ .to_ascii_lowercase(), + )) + }) + .collect() +} + +fn trust_anchor_fixture_match( + actual: &BTreeMap, + fixture: &BTreeMap, +) -> Option { + if fixture.is_empty() { + return None; + } + Some(actual.iter().all(|(uri, hash)| { + fixture + .get(uri) + .is_some_and(|fixture_hash| fixture_hash == hash) + })) +} + +struct SetSummary { + left: usize, + right: usize, + match_: bool, + only_in_left: Vec, + only_in_right: Vec, +} + +impl SetSummary { + fn build(left: &BTreeSet, right: &BTreeSet, sample_limit: usize) -> Self { + Self { + left: left.len(), + right: right.len(), + match_: left == right, + only_in_left: left.difference(right).take(sample_limit).cloned().collect(), + only_in_right: right.difference(left).take(sample_limit).cloned().collect(), + } + } + + fn to_json_with(&self, mut encode: F) -> Value + where + F: FnMut(&T) -> Value, + { + json!({ + "left": self.left, + "right": self.right, + "match": self.match_, + "onlyInLeft": self.only_in_left.iter().map(&mut encode).collect::>(), + "onlyInRight": self.only_in_right.iter().map(&mut encode).collect::>(), + }) + } +} + +fn compare_sets( + left: &BTreeSet, + right: &BTreeSet, + sample_limit: usize, +) -> SetSummary { + SetSummary::build(left, right, sample_limit) +} + +struct ObjectSummary { + left: usize, + right: usize, + match_: bool, + only_in_left_count: usize, + only_in_right_count: usize, + hash_mismatch_count: usize, + only_in_left: Vec, + only_in_right: Vec, + hash_mismatches: Vec, + only_in_left_by_extension: BTreeMap, + only_in_right_by_extension: BTreeMap, + hash_mismatches_by_extension: BTreeMap, + only_in_left_by_host: Vec, + only_in_right_by_host: Vec, + hash_mismatches_by_host: Vec, +} + +impl ObjectSummary { + fn to_json(&self) -> Value { + json!({ + "left": self.left, + "right": self.right, + "match": self.match_, + "onlyInLeftCount": self.only_in_left_count, + "onlyInRightCount": self.only_in_right_count, + "hashMismatchCount": self.hash_mismatch_count, + "onlyInLeft": self.only_in_left, + "onlyInRight": self.only_in_right, + "hashMismatches": self.hash_mismatches, + "onlyInLeftByExtension": self.only_in_left_by_extension, + "onlyInRightByExtension": self.only_in_right_by_extension, + "hashMismatchesByExtension": self.hash_mismatches_by_extension, + "onlyInLeftByHostTop": self.only_in_left_by_host, + "onlyInRightByHostTop": self.only_in_right_by_host, + "hashMismatchesByHostTop": self.hash_mismatches_by_host, + }) + } +} + +fn compare_object_maps( + left: &BTreeMap, + right: &BTreeMap, + sample_limit: usize, +) -> ObjectSummary { + let left_keys = left.keys().cloned().collect::>(); + let right_keys = right.keys().cloned().collect::>(); + let only_in_left_all = left_keys + .difference(&right_keys) + .cloned() + .collect::>(); + let only_in_right_all = right_keys + .difference(&left_keys) + .cloned() + .collect::>(); + let mut hash_mismatch_uris = Vec::new(); + let mut hash_mismatches = Vec::new(); + for key in left_keys.intersection(&right_keys) { + let left_hash = left.get(key).expect("key from map"); + let right_hash = right.get(key).expect("key from map"); + if left_hash != right_hash { + hash_mismatch_uris.push(key.clone()); + if hash_mismatches.len() < sample_limit { + hash_mismatches.push(json!({ + "uri": key, + "leftSha256": left_hash, + "rightSha256": right_hash, + })); + } + } + } + ObjectSummary { + left: left.len(), + right: right.len(), + match_: left == right, + only_in_left_count: only_in_left_all.len(), + only_in_right_count: only_in_right_all.len(), + hash_mismatch_count: hash_mismatch_uris.len(), + 
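+        // The *_count fields above cover the full URI diff; the sample vectors below (and hash_mismatches) are capped at sample_limit entries.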
only_in_left: only_in_left_all + .iter() + .take(sample_limit) + .cloned() + .collect(), + only_in_right: only_in_right_all + .iter() + .take(sample_limit) + .cloned() + .collect(), + hash_mismatches, + only_in_left_by_extension: group_by_extension(only_in_left_all.iter().map(String::as_str)), + only_in_right_by_extension: group_by_extension( + only_in_right_all.iter().map(String::as_str), + ), + hash_mismatches_by_extension: group_by_extension( + hash_mismatch_uris.iter().map(String::as_str), + ), + only_in_left_by_host: top_hosts(only_in_left_all.iter().map(String::as_str), sample_limit), + only_in_right_by_host: top_hosts( + only_in_right_all.iter().map(String::as_str), + sample_limit, + ), + hash_mismatches_by_host: top_hosts( + hash_mismatch_uris.iter().map(String::as_str), + sample_limit, + ), + } +} + +fn group_by_extension<'a>(uris: impl IntoIterator) -> BTreeMap { + let mut counts = BTreeMap::new(); + for uri in uris { + *counts.entry(uri_extension(uri)).or_insert(0) += 1; + } + counts +} + +fn top_hosts<'a>(uris: impl IntoIterator, limit: usize) -> Vec { + let mut counts = BTreeMap::new(); + for uri in uris { + *counts.entry(uri_host(uri)).or_insert(0usize) += 1; + } + let mut rows = counts.into_iter().collect::>(); + rows.sort_by(|(host_a, count_a), (host_b, count_b)| { + count_b.cmp(count_a).then_with(|| host_a.cmp(host_b)) + }); + rows.into_iter() + .take(limit) + .map(|(host, count)| { + json!({ + "host": host, + "count": count, + }) + }) + .collect() +} + +fn uri_host(uri: &str) -> String { + let without_scheme = uri.split_once("://").map(|(_, rest)| rest).unwrap_or(uri); + without_scheme + .split('/') + .next() + .filter(|host| !host.is_empty()) + .unwrap_or("") + .to_string() +} + +fn uri_extension(uri: &str) -> String { + let path = uri.split_once("://").map(|(_, rest)| rest).unwrap_or(uri); + let path = path.split_once('/').map(|(_, path)| path).unwrap_or(path); + let file = path.rsplit('/').next().unwrap_or(path); + if let Some((_, ext)) = file.rsplit_once('.') + && !ext.is_empty() + { + return format!(".{}", ext.to_ascii_lowercase()); + } + "".to_string() +} + +fn read_file(path: &Path) -> Result, String> { + std::fs::read(path).map_err(|e| format!("read file failed: {}: {e}", path.display())) +} + +fn read_json(path: &Path) -> Result { + serde_json::from_slice(&read_file(path)?) 
+ .map_err(|e| format!("decode json failed: {}: {e}", path.display())) +} + +fn write_json(path: &Path, value: &Value) -> Result<(), String> { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?; + } + std::fs::write( + path, + serde_json::to_vec_pretty(value).map_err(|e| e.to_string())?, + ) + .map_err(|e| format!("write json failed: {}: {e}", path.display())) +} + +fn write_markdown(path: &Path, triage: &Value) -> Result<(), String> { + let lines = vec![ + "# CCR/CIR Triage Summary".to_string(), + String::new(), + format!( + "- `diagnosis`: `{}`", + triage["diagnosis"].as_str().unwrap_or("-") + ), + format!( + "- `allMatch`: `{}`", + triage["allMatch"].as_bool().unwrap_or(false) + ), + format!( + "- `ccrStateDigestMatch`: `{}`", + triage["ccr"]["stateDigestMatch"].as_bool().unwrap_or(false) + ), + format!( + "- `cirObjectsMatch`: `{}`", + triage["cir"]["objects"]["match"].as_bool().unwrap_or(false) + ), + format!( + "- `cirRejectsMatch`: `{}`", + triage["cir"]["rejects"]["match"].as_bool().unwrap_or(false) + ), + format!( + "- `cirTrustAnchorsMatch`: `{}`", + triage["cir"]["trustAnchors"]["match"] + .as_bool() + .unwrap_or(false) + ), + ]; + std::fs::write(path, lines.join("\n") + "\n") + .map_err(|e| format!("write markdown failed: {}: {e}", path.display())) +} + +fn write_samples_jsonl(path: &Path, triage: &Value) -> Result<(), String> { + let mut rows = Vec::new(); + push_rows( + &mut rows, + "cir.object.only_in_left", + &triage["cir"]["objects"]["onlyInLeft"], + ); + push_rows( + &mut rows, + "cir.object.only_in_right", + &triage["cir"]["objects"]["onlyInRight"], + ); + push_rows( + &mut rows, + "cir.object.hash_mismatch", + &triage["cir"]["objects"]["hashMismatches"], + ); + push_rows( + &mut rows, + "cir.reject.only_in_left", + &triage["cir"]["rejects"]["onlyInLeft"], + ); + push_rows( + &mut rows, + "cir.reject.only_in_right", + &triage["cir"]["rejects"]["onlyInRight"], + ); + let mut body = String::new(); + for row in rows { + body.push_str(&serde_json::to_string(&row).map_err(|e| e.to_string())?); + body.push('\n'); + } + std::fs::write(path, body).map_err(|e| format!("write jsonl failed: {}: {e}", path.display())) +} + +fn push_rows(rows: &mut Vec, kind: &str, values: &Value) { + if let Some(items) = values.as_array() { + for item in items { + rows.push(json!({ + "kind": kind, + "value": item, + })); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rpki::ccr::{ + CcrContentInfo, CcrDigestAlgorithm, ManifestState, RpkiCanonicalCacheRepresentation, + build_aspa_payload_state, build_roa_payload_state, encode_content_info, + }; + use rpki::cir::{ + CIR_VERSION_V3, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, + CirRejectedObject, CirTrustAnchor, compute_reject_list_sha256, encode_cir, sha256, + }; + use rpki::data_model::roa::{IpPrefix, RoaAfi}; + use rpki::validation::objects::{AspaAttestation, Vrp}; + + #[test] + fn parse_args_accepts_left_right_inputs() { + let args = parse_args(&[ + "triage_ccr_cir_pair".to_string(), + "--left-ccr".to_string(), + "left.ccr".to_string(), + "--left-cir".to_string(), + "left.cir".to_string(), + "--left-meta".to_string(), + "left.json".to_string(), + "--right-ccr".to_string(), + "right.ccr".to_string(), + "--right-cir".to_string(), + "right.cir".to_string(), + "--right-meta".to_string(), + "right.json".to_string(), + "--out-dir".to_string(), + "out".to_string(), + "--sample-limit".to_string(), + "9".to_string(), + ]) + 
.expect("parse args"); + assert_eq!(args.left_ccr, PathBuf::from("left.ccr")); + assert_eq!(args.right_cir, PathBuf::from("right.cir")); + assert_eq!(args.sample_limit, 9); + } + + #[test] + fn diagnose_prefers_cir_explanation_order() { + let ccr = json!({ + "stateDigestMatch": false, + "compareViews": { + "vrps": {"match": false}, + "vaps": {"match": true} + } + }); + let integrity = json!({ + "metadataValid": true, + "runExitOk": true, + }); + let mut cir = json!({ + "trustAnchors": {"match": false}, + "objects": { + "match": false, + "onlyInLeftCount": 1, + "onlyInRightCount": 0, + "hashMismatchCount": 0 + }, + "rejects": {"match": false}, + "rejectListSha256Match": false + }); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "trust_anchor_input_difference" + ); + cir["trustAnchors"]["match"] = json!(true); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "sync_input_object_difference" + ); + cir["objects"]["onlyInLeftCount"] = json!(0); + cir["objects"]["hashMismatchCount"] = json!(1); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "sync_input_object_content_difference" + ); + cir["objects"]["match"] = json!(true); + cir["objects"]["hashMismatchCount"] = json!(0); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "validation_reject_policy_difference" + ); + cir["rejects"]["match"] = json!(true); + cir["rejectListSha256Match"] = json!(true); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "unknown_needs_manual_object_analysis" + ); + } + + #[test] + fn diagnose_covers_same_state_and_ccr_only_paths() { + let integrity = json!({ + "metadataValid": true, + "runExitOk": true, + }); + let mut ccr = json!({ + "stateDigestMatch": true, + "compareViews": null + }); + let cir = json!({ + "allMatch": true, + "trustAnchors": {"match": true}, + "objects": { + "match": true, + "onlyInLeftCount": 0, + "onlyInRightCount": 0, + "hashMismatchCount": 0 + }, + "rejects": {"match": true}, + "rejectListSha256Match": true + }); + assert_eq!(diagnose(&ccr, &cir, &integrity), "same_state"); + + ccr["stateDigestMatch"] = json!(false); + ccr["compareViews"] = json!({ + "vrps": {"match": true}, + "vaps": {"match": true} + }); + assert_eq!( + diagnose(&ccr, &cir, &integrity), + "ccr_projection_or_encoding_difference" + ); + } + + #[test] + fn run_reports_same_state_with_fixture_files() { + let temp = tempfile::tempdir().expect("tempdir"); + let left = write_side_bundle( + temp.path(), + "left", + &sample_ccr(64496, None), + sample_cir(0x11, None, "ta"), + ); + let right = write_side_bundle( + temp.path(), + "right", + &sample_ccr(64496, None), + sample_cir(0x11, None, "ta"), + ); + let out = temp.path().join("compare"); + + run(Args { + left_ccr: left.0, + left_cir: left.1, + left_meta: left.2, + right_ccr: right.0, + right_cir: right.1, + right_meta: right.2, + out_dir: out.clone(), + sample_limit: 20, + compare_view_trust_anchor: "test".to_string(), + }) + .expect("run"); + + let triage: Value = + serde_json::from_slice(&std::fs::read(out.join("triage.json")).expect("read triage")) + .expect("triage json"); + assert_eq!(triage["primaryDiagnosis"], "same_state"); + assert_eq!(triage["allMatch"], true); + assert!(out.join("ccr-summary.json").exists()); + assert!(out.join("cir-summary.json").exists()); + assert!(out.join("diff-samples.jsonl").exists()); + } + + #[test] + fn run_reports_ccr_only_difference_with_same_cir() { + let temp = tempfile::tempdir().expect("tempdir"); + let left = write_side_bundle( + temp.path(), + "left", + &sample_ccr(64496, Some(0x11)), + sample_cir(0x11, None, "ta"), + 
); + let right = write_side_bundle( + temp.path(), + "right", + &sample_ccr(64496, Some(0x22)), + sample_cir(0x11, None, "ta"), + ); + let out = temp.path().join("compare"); + + run(Args { + left_ccr: left.0, + left_cir: left.1, + left_meta: left.2, + right_ccr: right.0, + right_cir: right.1, + right_meta: right.2, + out_dir: out.clone(), + sample_limit: 20, + compare_view_trust_anchor: "test".to_string(), + }) + .expect("run"); + + let triage: Value = + serde_json::from_slice(&std::fs::read(out.join("triage.json")).expect("read triage")) + .expect("triage json"); + assert_eq!( + triage["primaryDiagnosis"], + "ccr_projection_or_encoding_difference" + ); + assert_eq!( + triage["ccr"]["comparePath"], + "ccr_state_digest_mismatch_with_compare_views_match" + ); + } + + #[test] + fn run_reports_trust_anchor_object_and_reject_differences() { + let temp = tempfile::tempdir().expect("tempdir"); + let ccr = sample_ccr(64496, None); + + let ta_left = write_side_bundle( + temp.path(), + "ta-left", + &ccr, + sample_cir_with_ta(0x11, None, sample_trust_anchor("ta-a")), + ); + let ta_right = write_side_bundle( + temp.path(), + "ta-right", + &ccr, + sample_cir_with_ta(0x11, None, sample_trust_anchor("ta-b")), + ); + assert_eq!( + run_and_read_diagnosis(temp.path(), "ta", ta_left, ta_right), + "trust_anchor_input_difference" + ); + + let object_left = + write_side_bundle(temp.path(), "obj-left", &ccr, sample_cir(0x11, None, "ta")); + let object_right = + write_side_bundle(temp.path(), "obj-right", &ccr, sample_cir(0x22, None, "ta")); + assert_eq!( + run_and_read_diagnosis(temp.path(), "object", object_left, object_right), + "sync_input_object_content_difference" + ); + + let reject_left = write_side_bundle( + temp.path(), + "rej-left", + &ccr, + sample_cir( + 0x11, + Some("rsync://example.net/repo/rejected-left.roa"), + "ta", + ), + ); + let reject_right = write_side_bundle( + temp.path(), + "rej-right", + &ccr, + sample_cir( + 0x11, + Some("rsync://example.net/repo/rejected-right.roa"), + "ta", + ), + ); + assert_eq!( + run_and_read_diagnosis(temp.path(), "reject", reject_left, reject_right), + "validation_reject_policy_difference" + ); + } + + #[test] + fn trust_anchor_compare_uses_fixture_ta_hash_not_tal_uri() { + let temp = tempfile::tempdir().expect("tempdir"); + let ccr = sample_ccr(64496, None); + let left_ta = sample_trust_anchor("ta"); + let mut right_ta = sample_trust_anchor("ta"); + right_ta.tal_uri = "https://different.example.net/ta.tal".to_string(); + let left = write_side_bundle( + temp.path(), + "left-tal-uri", + &ccr, + sample_cir_with_ta(0x11, None, left_ta), + ); + let right = write_side_bundle( + temp.path(), + "right-tal-uri", + &ccr, + sample_cir_with_ta(0x11, None, right_ta), + ); + assert_eq!( + run_and_read_diagnosis(temp.path(), "ta-uri", left, right), + "same_state" + ); + } + + #[test] + fn fixture_match_allows_strict_policy_to_drop_trust_anchor() { + let mut fixture = BTreeMap::new(); + fixture.insert( + "rsync://example.net/repo/ta-a.cer".to_string(), + "aaaaaaaa".to_string(), + ); + fixture.insert( + "rsync://example.net/repo/ta-b.cer".to_string(), + "bbbbbbbb".to_string(), + ); + let mut actual = BTreeMap::new(); + actual.insert( + "rsync://example.net/repo/ta-a.cer".to_string(), + "aaaaaaaa".to_string(), + ); + + assert_eq!(trust_anchor_fixture_match(&actual, &fixture), Some(true)); + + actual.insert( + "rsync://example.net/repo/ta-online.cer".to_string(), + "cccccccc".to_string(), + ); + assert_eq!(trust_anchor_fixture_match(&actual, &fixture), Some(false)); + } + + 
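+    // As exercised in fixture_match_allows_strict_policy_to_drop_trust_anchor above: the pinned fixture may
+    // list more trust anchors than a strict run keeps, but any observed TA absent from the fixture
+    // (e.g., one fetched online) turns the match into Some(false).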
#[test] + fn grouping_helpers_are_stable() { + let uris = [ + "rsync://b.example/repo/a.ROA", + "rsync://a.example/repo/b.mft", + "rsync://b.example/repo/c", + ]; + let extensions = group_by_extension(uris.iter().copied()); + assert_eq!(extensions.get(".roa").copied(), Some(1)); + assert_eq!(extensions.get(".mft").copied(), Some(1)); + assert_eq!(extensions.get("").copied(), Some(1)); + let hosts = top_hosts(uris.iter().copied(), 1); + assert_eq!(hosts[0]["host"], "b.example"); + assert_eq!(hosts[0]["count"], 2); + } + + fn run_and_read_diagnosis( + root: &Path, + name: &str, + left: (PathBuf, PathBuf, PathBuf), + right: (PathBuf, PathBuf, PathBuf), + ) -> String { + let out = root.join(format!("compare-{name}")); + run(Args { + left_ccr: left.0, + left_cir: left.1, + left_meta: left.2, + right_ccr: right.0, + right_cir: right.1, + right_meta: right.2, + out_dir: out.clone(), + sample_limit: 20, + compare_view_trust_anchor: "test".to_string(), + }) + .expect("run"); + let triage: Value = + serde_json::from_slice(&std::fs::read(out.join("triage.json")).expect("read triage")) + .expect("triage json"); + triage["primaryDiagnosis"].as_str().unwrap().to_string() + } + + fn write_side_bundle( + root: &Path, + label: &str, + ccr: &[u8], + cir: CanonicalInputRepresentation, + ) -> (PathBuf, PathBuf, PathBuf) { + let dir = root.join(label); + std::fs::create_dir_all(&dir).expect("side dir"); + let ccr_path = dir.join("result.ccr"); + let cir_path = dir.join("result.cir"); + let meta_path = dir.join("run-meta.json"); + std::fs::write(&ccr_path, ccr).expect("write ccr"); + std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir"); + let fixture_proof = fixture_proof_for_cir(&cir); + let meta = json!({ + "schemaVersion": 1, + "experimentId": "test", + "sideLabel": label, + "step": "snapshot", + "scope": {"rirs": ["apnic"], "all5": false}, + "rp": {"kind": "test", "mode": "standard"}, + "metrics": {"exitCode": 0}, + "fixtureProof": fixture_proof, + "artifacts": { + "ccr": ccr_path.to_string_lossy(), + "cir": cir_path.to_string_lossy(), + "runMeta": meta_path.to_string_lossy() + } + }); + std::fs::write( + &meta_path, + serde_json::to_vec_pretty(&meta).expect("encode meta"), + ) + .expect("write meta"); + (ccr_path, cir_path, meta_path) + } + + fn sample_ccr(asn: u32, manifest_hash_fill: Option) -> Vec { + let vrps = build_roa_payload_state(&[Vrp { + asn, + prefix: IpPrefix { + afi: RoaAfi::Ipv4, + prefix_len: 24, + addr: [192, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + }, + max_length: 24, + }]) + .expect("build vrps"); + let vaps = build_aspa_payload_state(&[AspaAttestation { + customer_as_id: asn, + provider_as_ids: vec![64497], + }]) + .expect("build vaps"); + let mfts = manifest_hash_fill.map(|fill| ManifestState { + mis: Vec::new(), + most_recent_update: time::OffsetDateTime::UNIX_EPOCH, + hash: vec![fill; 32], + }); + let ccr = CcrContentInfo::new(RpkiCanonicalCacheRepresentation { + version: 0, + hash_alg: CcrDigestAlgorithm::Sha256, + produced_at: time::OffsetDateTime::UNIX_EPOCH, + mfts, + vrps: Some(vrps), + vaps: Some(vaps), + tas: None, + rks: None, + }); + encode_content_info(&ccr).expect("encode ccr") + } + + fn sample_cir( + object_hash_fill: u8, + rejected_uri: Option<&str>, + trust_anchor_name: &str, + ) -> CanonicalInputRepresentation { + sample_cir_with_ta( + object_hash_fill, + rejected_uri, + sample_trust_anchor(trust_anchor_name), + ) + } + + fn sample_cir_with_ta( + object_hash_fill: u8, + rejected_uri: Option<&str>, + trust_anchor: CirTrustAnchor, 
+ ) -> CanonicalInputRepresentation { + let rejected_objects = rejected_uri + .map(|uri| { + vec![CirRejectedObject { + object_uri: uri.to_string(), + reason: Some("test".to_string()), + }] + }) + .unwrap_or_default(); + CanonicalInputRepresentation { + version: CIR_VERSION_V3, + hash_alg: CirHashAlgorithm::Sha256, + validation_time: time::OffsetDateTime::UNIX_EPOCH, + objects: vec![CirObject { + rsync_uri: "rsync://example.net/repo/a.roa".to_string(), + sha256: vec![object_hash_fill; 32], + }], + trust_anchors: vec![trust_anchor], + reject_list_sha256: compute_reject_list_sha256( + rejected_objects.iter().map(|item| item.object_uri.as_str()), + ), + rejected_objects, + } + } + + fn sample_trust_anchor(name: &str) -> CirTrustAnchor { + let ta_rsync_uri = format!("rsync://example.net/repo/{name}.cer"); + let tal_uri = format!("https://tal.example.net/{name}.tal"); + let ta_certificate_der = format!("ta-der-{name}").into_bytes(); + CirTrustAnchor { + ta_rsync_uri: ta_rsync_uri.clone(), + tal_uri, + tal_bytes: format!("{ta_rsync_uri}\n\nAQID\n").into_bytes(), + ta_certificate_sha256: sha256(&ta_certificate_der), + ta_certificate_der, + } + } + + fn fixture_proof_for_cir(cir: &CanonicalInputRepresentation) -> Value { + json!({ + "schemaVersion": 1, + "taFixturePinned": true, + "trustAnchors": cir.trust_anchors.iter().map(|item| json!({ + "taRsyncUri": item.ta_rsync_uri, + "taCertificateSha256": hex::encode(&item.ta_certificate_sha256), + })).collect::>(), + }) + } +} diff --git a/src/validation/objects.rs b/src/validation/objects.rs index 0973c5a..bbe4b58 100644 --- a/src/validation/objects.rs +++ b/src/validation/objects.rs @@ -189,12 +189,55 @@ pub fn process_publication_point_for_issuer_with_options = Vec::new(); // Enforce that `manifest_bytes` is actually a manifest object. - let _manifest = ManifestObject::decode_der_with_strict_options( + let _manifest = match ManifestObject::decode_der_with_strict_options( manifest_bytes, policy.strict.cms_der, policy.strict.name, - ) - .expect("publication point snapshot manifest decodes"); + ) { + Ok(manifest) => manifest, + Err(e) => { + stats.publication_point_dropped = true; + warnings.push( + Warning::new(format!( + "dropping publication point: manifest decode failed: {e}" + )) + .with_rfc_refs(&[ + RfcRef("RFC 9286 §4"), + RfcRef("RFC 9286 §6.2"), + RfcRef("RFC 9286 §6.6"), + ]) + .with_context(manifest_rsync_uri), + ); + for f in locked_files { + if f.rsync_uri.ends_with(".roa") { + audit.push(ObjectAuditEntry { + rsync_uri: f.rsync_uri.clone(), + sha256_hex: sha256_hex_from_32(&f.sha256), + kind: AuditObjectKind::Roa, + result: AuditObjectResult::Skipped, + detail: Some("skipped: manifest decode failed".to_string()), + }); + } else if f.rsync_uri.ends_with(".asa") { + audit.push(ObjectAuditEntry { + rsync_uri: f.rsync_uri.clone(), + sha256_hex: sha256_hex_from_32(&f.sha256), + kind: AuditObjectKind::Aspa, + result: AuditObjectResult::Skipped, + detail: Some("skipped: manifest decode failed".to_string()), + }); + } + } + return ObjectsOutput { + vrps: Vec::new(), + aspas: Vec::new(), + router_keys: Vec::new(), + local_outputs_cache: Vec::new(), + warnings, + stats, + audit, + }; + } + }; // Decode issuer CA once; if it fails we cannot validate ROA/ASPA EE certificates. 
let issuer_ca = match decode_resource_certificate_with_policy(issuer_ca_der, policy) { @@ -978,12 +1021,55 @@ pub(crate) fn prepare_publication_point_for_parallel_roa = Vec::new(); - let _manifest = ManifestObject::decode_der_with_strict_options( + let _manifest = match ManifestObject::decode_der_with_strict_options( manifest_bytes, policy.strict.cms_der, policy.strict.name, - ) - .expect("publication point snapshot manifest decodes"); + ) { + Ok(manifest) => manifest, + Err(e) => { + stats.publication_point_dropped = true; + warnings.push( + Warning::new(format!( + "dropping publication point: manifest decode failed: {e}" + )) + .with_rfc_refs(&[ + RfcRef("RFC 9286 §4"), + RfcRef("RFC 9286 §6.2"), + RfcRef("RFC 9286 §6.6"), + ]) + .with_context(manifest_rsync_uri), + ); + for f in locked_files { + if f.rsync_uri.ends_with(".roa") { + audit.push(ObjectAuditEntry { + rsync_uri: f.rsync_uri.clone(), + sha256_hex: sha256_hex_from_32(&f.sha256), + kind: AuditObjectKind::Roa, + result: AuditObjectResult::Skipped, + detail: Some("skipped: manifest decode failed".to_string()), + }); + } else if f.rsync_uri.ends_with(".asa") { + audit.push(ObjectAuditEntry { + rsync_uri: f.rsync_uri.clone(), + sha256_hex: sha256_hex_from_32(&f.sha256), + kind: AuditObjectKind::Aspa, + result: AuditObjectResult::Skipped, + detail: Some("skipped: manifest decode failed".to_string()), + }); + } + } + return ParallelObjectsPrepare::Complete(ObjectsOutput { + vrps: Vec::new(), + aspas: Vec::new(), + router_keys: Vec::new(), + local_outputs_cache: Vec::new(), + warnings, + stats, + audit, + }); + } + }; let issuer_ca = match decode_resource_certificate_with_policy(issuer_ca_der, policy) { Ok(v) => v, @@ -2403,7 +2489,11 @@ mod tests { Afi, AsIdOrRange, AsIdentifierChoice, IpAddressFamily, IpAddressOrRange, IpAddressRange, IpPrefix, IpResourceSet, }; + use crate::policy::Policy; + use crate::storage::PackTime; + use crate::validation::publication_point::PublicationPointSnapshot; use std::collections::HashMap; + use time::OffsetDateTime; fn fixture_bytes(path: &str) -> Vec { std::fs::read(std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(path)) @@ -2874,4 +2964,41 @@ mod tests { assert_eq!(roa_afi_to_string(RoaAfi::Ipv4), "ipv4"); assert_eq!(roa_afi_to_string(RoaAfi::Ipv6), "ipv6"); } + + #[test] + fn strict_name_manifest_decode_failure_drops_publication_point() { + let publication_point = PublicationPointSnapshot { + format_version: PublicationPointSnapshot::FORMAT_VERSION_V1, + manifest_rsync_uri: "rsync://example.test/repo/manifest.mft".to_string(), + publication_point_rsync_uri: "rsync://example.test/repo/".to_string(), + manifest_number_be: vec![0x01], + this_update: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()), + next_update: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()), + verified_at: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()), + manifest_bytes: vec![0x01, 0x02, 0x03], + files: vec![], + }; + let policy = Policy::default(); + let output = process_publication_point_for_issuer_with_options( + &publication_point, + &policy, + &[], + None, + None, + None, + OffsetDateTime::now_utc(), + None, + false, + ); + assert!(output.stats.publication_point_dropped); + assert!(output.vrps.is_empty()); + assert!( + output + .warnings + .iter() + .any(|warning| warning.message.contains("manifest decode failed")), + "{:?}", + output.warnings + ); + } }
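For manual follow-up on a non-matching run, a minimal sketch (not part of this diff) of a consumer for the diff-samples.jsonl file written by write_samples_jsonl: it collects the URIs behind the cir.object.* and cir.reject.* rows so they can be fetched and handed to rpki_object_parse individually. The binary name extract_triage_uris and the command-line handling are illustrative assumptions; the only dependency is serde_json, which the crate already uses.

use std::collections::BTreeSet;
use std::path::PathBuf;

use serde_json::Value;

fn main() -> Result<(), String> {
    // Path to a diff-samples.jsonl produced by triage_ccr_cir_pair (illustrative; passed as the first argument).
    let path = std::env::args()
        .nth(1)
        .map(PathBuf::from)
        .ok_or("usage: extract_triage_uris <diff-samples.jsonl>")?;
    let body = std::fs::read_to_string(&path)
        .map_err(|e| format!("read failed: {}: {e}", path.display()))?;
    let mut uris = BTreeSet::new();
    for line in body.lines().filter(|line| !line.trim().is_empty()) {
        let row: Value =
            serde_json::from_str(line).map_err(|e| format!("decode jsonl row failed: {e}"))?;
        match row["kind"].as_str() {
            // Object/reject URI differences carry the URI directly as the row value.
            Some("cir.object.only_in_left")
            | Some("cir.object.only_in_right")
            | Some("cir.reject.only_in_left")
            | Some("cir.reject.only_in_right") => {
                if let Some(uri) = row["value"].as_str() {
                    uris.insert(uri.to_string());
                }
            }
            // Hash mismatches wrap the URI together with both SHA-256 values.
            Some("cir.object.hash_mismatch") => {
                if let Some(uri) = row["value"]["uri"].as_str() {
                    uris.insert(uri.to_string());
                }
            }
            _ => {}
        }
    }
    // One URI per line; these are candidates one might fetch and pass to rpki_object_parse --input <file>.
    for uri in uris {
        println!("{uri}");
    }
    Ok(())
}

Under these assumptions it would be run with the triage output directory's diff-samples.jsonl path as its only argument.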