#!/usr/bin/env python3
import argparse
import hashlib
import json
import os
import platform
import subprocess
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Bundled per-RIR trust-anchor fixtures, keyed by lowercase RIR name.
# Paths are relative to the fixture directory supplied on the command line:
#   "tal" -> the Trust Anchor Locator file
#   "ta"  -> the pinned (offline) trust-anchor certificate
RIR_FIXTURES = {
    "afrinic": {
        "tal": "tal/afrinic.tal",
        "ta": "ta/afrinic-ta.cer",
    },
    "apnic": {
        "tal": "tal/apnic-rfc7730-https.tal",
        "ta": "ta/apnic-ta.cer",
    },
    "arin": {
        "tal": "tal/arin.tal",
        "ta": "ta/arin-ta.cer",
    },
    "lacnic": {
        "tal": "tal/lacnic.tal",
        "ta": "ta/lacnic-ta.cer",
    },
    "ripe": {
        "tal": "tal/ripe-ncc.tal",
        "ta": "ta/ripe-ncc-ta.cer",
    },
}
def utc_now() -> str:
    """Return the current UTC time as an ISO-8601 string with a trailing 'Z'."""
    stamp = datetime.now(timezone.utc).isoformat()
    # An aware-UTC isoformat always ends in "+00:00"; swap it for "Z".
    return stamp[:-6] + "Z" if stamp.endswith("+00:00") else stamp
def sha256_file(path: Path) -> str:
    """Return the hex SHA-256 digest of the file at *path*, read in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        while chunk := handle.read(1 << 20):
            digest.update(chunk)
    return digest.hexdigest()
def read_tal_uris(path: Path) -> list[str]:
    """Extract the URI section from the top of a TAL file.

    Blank lines, comments, and any other non-URI text before the first URI
    are skipped; the first non-URI line *after* at least one URI has been
    collected terminates the section (the base64 key block follows it).
    """
    collected: list[str] = []
    schemes = ("rsync://", "https://", "http://")
    with path.open("r", encoding="utf-8") as handle:
        for raw_line in handle:
            text = raw_line.strip()
            if text.startswith(schemes):
                collected.append(text)
            elif collected:
                # URI section already started, and this line is not a URI:
                # the section is over.
                break
    return collected
def first_uri(uris: list[str], prefixes: tuple[str, ...]) -> str | None:
for uri in uris:
if uri.startswith(prefixes):
return uri
return None
def parse_rirs(raw: str) -> list[str]:
    """Parse a comma-separated RIR list, lowercased and validated.

    Raises SystemExit when the list is empty or contains a name that is
    not a key of RIR_FIXTURES.
    """
    selected = [token.strip().lower() for token in raw.split(",") if token.strip()]
    if not selected:
        raise SystemExit("RIR list must not be empty")
    unknown = [name for name in selected if name not in RIR_FIXTURES]
    if unknown:
        raise SystemExit(
            f"invalid RIR(s): {','.join(unknown)}; allowed: {','.join(RIR_FIXTURES)}"
        )
    return selected
def rel_or_abs(path: Path, root: Path | None) -> str:
path = path.resolve()
if root is not None:
try:
return path.relative_to(root.resolve()).as_posix()
except ValueError:
pass
return path.as_posix()
def git_commit(repo_root: Path) -> str | None:
try:
return subprocess.check_output(
["git", "-C", str(repo_root), "rev-parse", "--short", "HEAD"],
text=True,
stderr=subprocess.DEVNULL,
).strip()
except (subprocess.CalledProcessError, FileNotFoundError):
return None
def write_json(path: Path, value: dict[str, Any]) -> None:
    """Write *value* as pretty-printed UTF-8 JSON at *path*, creating parent dirs."""
    serialized = json.dumps(value, indent=2, ensure_ascii=False)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(serialized + "\n", encoding="utf-8")
def build_fixture_proof(
    fixture_dir: Path,
    rirs: list[str],
    repo_root: Path | None,
    ta_online_fetch_observed: bool,
) -> dict[str, Any]:
    """Build the fixture-proof document for the selected RIRs.

    For each RIR, records fixture paths, URIs read from the TAL, SHA-256
    digests, byte sizes, and whether the TA was served from the pinned
    fixture. Raises SystemExit when a fixture file is missing.
    """

    def describe(rir: str) -> dict[str, Any]:
        # One trust-anchor entry; fails fast on a missing fixture file.
        fixture = RIR_FIXTURES[rir]
        tal_file = fixture_dir / fixture["tal"]
        ta_file = fixture_dir / fixture["ta"]
        if not tal_file.is_file():
            raise SystemExit(f"missing TAL fixture for {rir}: {tal_file}")
        if not ta_file.is_file():
            raise SystemExit(f"missing TA fixture for {rir}: {ta_file}")
        uris = read_tal_uris(tal_file)
        return {
            "rir": rir,
            "talPath": rel_or_abs(tal_file, repo_root),
            "taPath": rel_or_abs(ta_file, repo_root),
            "talUri": first_uri(uris, ("https://", "http://")),
            "taRsyncUri": first_uri(uris, ("rsync://",)),
            "talSha256": sha256_file(tal_file),
            "taCertificateSha256": sha256_file(ta_file),
            "talBytes": tal_file.stat().st_size,
            "taCertificateBytes": ta_file.stat().st_size,
            "taFixturePinned": not ta_online_fetch_observed,
            "taOnlineFetchObserved": ta_online_fetch_observed,
        }

    anchors = [describe(rir) for rir in rirs]
    return {
        "schemaVersion": 1,
        "generatedBy": "feature035-experiment-driver",
        "generatedAtUtc": utc_now(),
        "fixtureDir": rel_or_abs(fixture_dir, repo_root),
        "all5": set(rirs) == set(RIR_FIXTURES),
        "rirs": rirs,
        "trustAnchors": anchors,
    }
def parse_csv(raw: str) -> list[str]:
    """Split a comma-separated option string into trimmed, non-empty tokens."""
    if not raw:
        return []
    trimmed = (piece.strip() for piece in raw.split(","))
    return [token for token in trimmed if token]
def optional_path(raw: str | None, repo_root: Path | None) -> str | None:
    """Normalize an optional path string via rel_or_abs; None passes through."""
    return None if raw is None else rel_or_abs(Path(raw), repo_root)
def build_run_meta(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the run-meta JSON document from parsed CLI arguments.

    Decodes the JSON-encoded option payloads (--argv-json, --env-json,
    --fixture-proof-summary-json), optionally inlines the fixture-proof
    document referenced by --fixture-proof (only when the file exists),
    and returns the full metadata mapping ready for serialization.

    Raises SystemExit when the RIR list is invalid or a JSON payload does
    not decode to the expected container type.
    """
    rirs = parse_rirs(args.rirs)
    repo_root = Path(args.repo_root).resolve() if args.repo_root else None
    argv = json.loads(args.argv_json) if args.argv_json else []
    env_whitelist = json.loads(args.env_json) if args.env_json else {}
    fixture_proof_summary = (
        json.loads(args.fixture_proof_summary_json)
        if args.fixture_proof_summary_json
        else None
    )
    # Inline the referenced fixture-proof document when present on disk;
    # a missing file silently leaves the field null (best-effort embed).
    fixture_proof = None
    if args.fixture_proof:
        fixture_proof_path = Path(args.fixture_proof)
        if fixture_proof_path.is_file():
            fixture_proof = json.loads(fixture_proof_path.read_text(encoding="utf-8"))
    # Validate decoded payload shapes before building the document.
    if not isinstance(argv, list):
        raise SystemExit("--argv-json must decode to a JSON array")
    if not isinstance(env_whitelist, dict):
        raise SystemExit("--env-json must decode to a JSON object")
    if fixture_proof_summary is not None and not isinstance(fixture_proof_summary, dict):
        raise SystemExit("--fixture-proof-summary-json must decode to a JSON object")
    return {
        "schemaVersion": 1,
        "generatedBy": "feature035-experiment-driver",
        "generatedAtUtc": utc_now(),
        "experimentId": args.experiment_id,
        "side": args.side,
        "sideLabel": args.side_label,
        "step": args.step,
        "runId": args.run_id,
        # liveRun is simply the complement of replayUsed.
        "liveRun": not args.replay_used,
        "replayUsed": args.replay_used,
        "rp": {
            "kind": args.rp_kind,
            "binary": args.rp_binary,
            "version": args.rp_version,
            "gitCommit": args.rp_git_commit,
            "mode": args.rp_mode,
            "protocolMode": args.protocol_mode,
            "strictPolicies": parse_csv(args.strict_policies),
        },
        "scope": {
            "rirs": rirs,
            # True when the run covers every RIR known to RIR_FIXTURES.
            "all5": set(rirs) == set(RIR_FIXTURES),
        },
        "command": {
            "argv": argv,
            "cwd": args.cwd,
            "envWhitelist": env_whitelist,
        },
        "state": {
            "resetBeforeRun": args.reset_before_run,
            "stateRoot": args.state_root,
            "db": args.db,
            "repoBytesDb": args.repo_bytes_db,
            "rawStoreDb": args.raw_store_db,
            "rsyncMirrorRoot": args.rsync_mirror_root,
            "cacheRoot": args.cache_root,
        },
        # Artifact paths are rewritten relative to repo_root when possible.
        "artifacts": {
            "ccr": optional_path(args.ccr, repo_root),
            "cir": optional_path(args.cir, repo_root),
            "runMeta": optional_path(str(args.out), repo_root),
            "fixtureProof": optional_path(args.fixture_proof, repo_root),
            "reportJson": optional_path(args.report_json, repo_root),
            "stageTimingJson": optional_path(args.stage_timing_json, repo_root),
            "stdoutLog": optional_path(args.stdout_log, repo_root),
            "stderrLog": optional_path(args.stderr_log, repo_root),
            "processTime": optional_path(args.process_time, repo_root),
            "vrpsCsv": optional_path(args.vrps_csv, repo_root),
            "vapsCsv": optional_path(args.vaps_csv, repo_root),
        },
        "fixtureProof": fixture_proof,
        "fixtureProofSummary": fixture_proof_summary,
        "metrics": {
            "exitCode": args.exit_code,
            "wallMs": args.wall_ms,
            "maxRssKb": args.max_rss_kb,
            "vrps": args.vrps,
            "vaps": args.vaps,
            "publicationPoints": args.publication_points,
            "warnings": args.warnings,
            "cirObjectCount": args.cir_object_count,
            "cirRejectCount": args.cir_reject_count,
            "cirTrustAnchorCount": args.cir_trust_anchor_count,
            "ccrStateDigest": args.ccr_state_digest,
        },
        "environment": {
            # Fall back to the local platform when not supplied explicitly.
            "host": args.host or platform.node(),
            "platform": args.platform or platform.platform(),
        },
    }
def command_fixture_proof(args: argparse.Namespace) -> None:
    """CLI handler: write the fixture-proof JSON document to --out."""
    root = Path(args.repo_root).resolve() if args.repo_root else None
    payload = build_fixture_proof(
        fixture_dir=Path(args.fixture_dir),
        rirs=parse_rirs(args.rirs),
        repo_root=root,
        ta_online_fetch_observed=args.ta_online_fetch_observed,
    )
    write_json(Path(args.out), payload)
def command_run_meta(args: argparse.Namespace) -> None:
    """CLI handler: write the run-meta JSON document to --out."""
    document = build_run_meta(args)
    write_json(Path(args.out), document)
def command_dry_run_bundle(args: argparse.Namespace) -> None:
    """CLI handler: produce a complete offline artifact bundle.

    Writes a fixture-proof document plus placeholder run-meta documents for
    both sides (A/left, B/right) of the experiment, with zeroed metrics, so
    the bundle layout can be validated without running any relying-party
    software or touching the network.
    """
    out_dir = Path(args.out_dir)
    repo_root = Path(args.repo_root).resolve() if args.repo_root else Path.cwd().resolve()
    fixture_proof = out_dir / "fixture-proof.json"
    # Fixture proof first: the per-side run-meta documents embed it.
    command_fixture_proof(
        argparse.Namespace(
            fixture_dir=args.fixture_dir,
            rirs=args.rirs,
            repo_root=str(repo_root),
            out=str(fixture_proof),
            ta_online_fetch_observed=False,
        )
    )
    for side, side_label in (("left", "A"), ("right", "B")):
        run_dir = out_dir / side_label / "snapshot"
        meta_args = argparse.Namespace(
            out=run_dir / "run-meta.json",
            repo_root=str(repo_root),
            experiment_id=args.experiment_id,
            side=side,
            side_label=side_label,
            step="snapshot",
            run_id=f"{side_label}-snapshot-dry-run",
            replay_used=False,
            rp_kind="ours" if side_label == "A" else "rpki-client",
            rp_binary=f"bin/{'rpki' if side_label == 'A' else 'rpki-client'}",
            rp_version="dry-run",
            rp_git_commit=git_commit(repo_root),
            rp_mode="standard",
            protocol_mode="rrdp+rsync",
            strict_policies="",
            rirs=args.rirs,
            argv_json=json.dumps(["dry-run"]),
            env_json=json.dumps({"RPKI_PROGRESS_LOG": "1"}),
            cwd=str(out_dir),
            reset_before_run=True,
            state_root=str(out_dir / side_label / "state"),
            db=str(out_dir / side_label / "state" / "work-db"),
            repo_bytes_db=str(out_dir / side_label / "state" / "repo-bytes.db"),
            raw_store_db=str(out_dir / side_label / "state" / "raw-store.db"),
            rsync_mirror_root=str(out_dir / side_label / "state" / "rsync-mirror"),
            cache_root=str(out_dir / side_label / "state" / "cache"),
            ccr=str(run_dir / "result.ccr"),
            cir=str(run_dir / "result.cir"),
            fixture_proof=str(fixture_proof),
            report_json=str(run_dir / "report.json"),
            stage_timing_json=str(run_dir / "stage-timing.json"),
            stdout_log=str(run_dir / "stdout.log"),
            stderr_log=str(run_dir / "stderr.log"),
            process_time=str(run_dir / "process-time.txt"),
            vrps_csv=str(run_dir / "vrps.csv"),
            vaps_csv=str(run_dir / "vaps.csv"),
            exit_code=0,
            wall_ms=0,
            max_rss_kb=0,
            vrps=0,
            vaps=0,
            publication_points=0,
            warnings=0,
            cir_object_count=0,
            cir_reject_count=0,
            cir_trust_anchor_count=len(parse_rirs(args.rirs)),
            ccr_state_digest=None,
            fixture_proof_summary_json=json.dumps(
                {
                    "taFixturePinned": True,
                    "taOnlineFetchObserved": False,
                    "trustAnchorCount": len(parse_rirs(args.rirs)),
                }
            ),
            # FIX: build_run_meta reads args.host and args.platform; these
            # were missing here and raised AttributeError on every dry run.
            # None triggers the platform.node()/platform.platform() fallback.
            host=None,
            platform=None,
        )
        command_run_meta(meta_args)
def add_run_meta_args(parser: argparse.ArgumentParser) -> None:
    """Register the full run-meta CLI option surface on *parser*.

    Registration order matches the original layout so --help output is
    unchanged; homogeneous option groups are registered via loops.
    """
    parser.add_argument("--out", required=True)
    parser.add_argument("--repo-root")
    parser.add_argument("--experiment-id", required=True)
    parser.add_argument("--side", choices=["left", "right"], required=True)
    parser.add_argument("--side-label", choices=["A", "B"], required=True)
    parser.add_argument("--step", choices=["snapshot", "delta"], required=True)
    parser.add_argument("--run-id", required=True)
    parser.add_argument("--replay-used", action="store_true")
    parser.add_argument("--rp-kind", required=True)
    parser.add_argument("--rp-binary", required=True)
    parser.add_argument("--rp-version")
    parser.add_argument("--rp-git-commit")
    # Options that carry a non-trivial default.
    for flag, default in (
        ("--rp-mode", "standard"),
        ("--protocol-mode", "rrdp+rsync"),
        ("--strict-policies", ""),
        ("--rirs", "afrinic,apnic,arin,lacnic,ripe"),
    ):
        parser.add_argument(flag, default=default)
    parser.add_argument("--argv-json")
    parser.add_argument("--env-json")
    parser.add_argument("--cwd", default=os.getcwd())
    parser.add_argument("--reset-before-run", action="store_true")
    # Free-form string options: state layout and artifact paths.
    for flag in (
        "--state-root", "--db", "--repo-bytes-db", "--raw-store-db",
        "--rsync-mirror-root", "--cache-root", "--ccr", "--cir",
        "--fixture-proof", "--fixture-proof-summary-json", "--report-json",
        "--stage-timing-json", "--stdout-log", "--stderr-log",
        "--process-time", "--vrps-csv", "--vaps-csv",
    ):
        parser.add_argument(flag)
    # Integer metrics captured from the measured run.
    for flag in (
        "--exit-code", "--wall-ms", "--max-rss-kb", "--vrps", "--vaps",
        "--publication-points", "--warnings", "--cir-object-count",
        "--cir-reject-count", "--cir-trust-anchor-count",
    ):
        parser.add_argument(flag, type=int)
    parser.add_argument("--ccr-state-digest")
    parser.add_argument("--host")
    parser.add_argument("--platform")
def main() -> None:
    """Entry point: build the CLI and dispatch to the chosen subcommand."""
    parser = argparse.ArgumentParser(
        description="Feature #035 CCR/CIR experiment bundle helpers"
    )
    subparsers = parser.add_subparsers(dest="command", required=True)

    fixture = subparsers.add_parser("fixture-proof")
    fixture.add_argument("--fixture-dir", default="tests/fixtures")
    fixture.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe")
    fixture.add_argument("--repo-root")
    fixture.add_argument("--out", required=True)
    fixture.add_argument("--ta-online-fetch-observed", action="store_true")
    fixture.set_defaults(func=command_fixture_proof)

    meta_parser = subparsers.add_parser("run-meta")
    add_run_meta_args(meta_parser)
    meta_parser.set_defaults(func=command_run_meta)

    bundle = subparsers.add_parser("dry-run-bundle")
    bundle.add_argument("--out-dir", required=True)
    bundle.add_argument("--repo-root", default=".")
    bundle.add_argument("--fixture-dir", default="tests/fixtures")
    bundle.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe")
    bundle.add_argument("--experiment-id", default="m2-dry-run")
    bundle.set_defaults(func=command_dry_run_bundle)

    parsed = parser.parse_args()
    parsed.func(parsed)
# Allow importing this module (e.g. from tests) without running the CLI.
if __name__ == "__main__":
    main()