20260512 完成CCR CIR行为差异实验能力

This commit is contained in:
yuyr 2026-05-13 05:11:17 +08:00
parent f2fbb20a29
commit 137b3516d0
8 changed files with 3775 additions and 7 deletions

View File

@ -27,7 +27,7 @@ cleanup() {
}
trap cleanup EXIT
IGNORE_REGEX='src/bin/repository_view_stats\.rs|src/bin/trace_arin_missing_vrps\.rs|src/bin/db_stats\.rs|src/bin/rrdp_state_dump\.rs|src/bin/ccr_dump\.rs|src/bin/ccr_verify\.rs|src/bin/ccr_to_routinator_csv\.rs|src/bin/ccr_to_compare_views\.rs|src/bin/cir_materialize\.rs|src/bin/cir_extract_inputs\.rs|src/bin/cir_drop_report\.rs|src/bin/cir_ta_only_fixture\.rs|src/ccr/compare_view\.rs|src/progress_log\.rs|src/cli\.rs|src/validation/run_tree_from_tal\.rs|src/validation/tree_parallel\.rs|src/validation/from_tal\.rs|src/sync/store_projection\.rs|src/cir/materialize\.rs'
IGNORE_REGEX='src/bin/repository_view_stats\.rs|src/bin/trace_arin_missing_vrps\.rs|src/bin/db_stats\.rs|src/bin/rrdp_state_dump\.rs|src/bin/ccr_dump\.rs|src/bin/ccr_verify\.rs|src/bin/ccr_to_routinator_csv\.rs|src/bin/ccr_to_compare_views\.rs|src/bin/cir_materialize\.rs|src/bin/cir_extract_inputs\.rs|src/bin/cir_drop_report\.rs|src/bin/cir_ta_only_fixture\.rs|src/bin/cir_dump_reject_list\.rs|src/bin/rpki_object_parse\.rs|src/bin/triage_ccr_cir_pair\.rs|src/ccr/compare_view\.rs|src/progress_log\.rs|src/cli\.rs|src/validation/run_tree_from_tal\.rs|src/validation/tree_parallel\.rs|src/validation/from_tal\.rs|src/sync/store_projection\.rs|src/cir/materialize\.rs'
# Preserve colored output even though we post-process output by running under a pseudo-TTY.
# We run tests only once, then generate both CLI text + HTML reports without rerunning tests.

View File

@ -0,0 +1,41 @@
{
"schemaVersion": 1,
"defaultRirs": ["afrinic", "apnic", "arin", "lacnic", "ripe"],
"experiments": [
{
"id": "sync-ours-rsync-only",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "ours", "mode": "standard", "protocol": "rsync-only" }
},
{
"id": "sync-rpki-client-rsync-only",
"left": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rsync-only" }
},
{
"id": "strict-name",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "ours", "mode": "strict-name", "protocol": "rrdp+rsync" }
},
{
"id": "strict-cms-der",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "ours", "mode": "strict-cms-der", "protocol": "rrdp+rsync" }
},
{
"id": "strict-signed-attrs",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "ours", "mode": "strict-signed-attrs", "protocol": "rrdp+rsync" }
},
{
"id": "strict-all",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "ours", "mode": "strict-all", "protocol": "rrdp+rsync" }
},
{
"id": "rp-implementation-standard",
"left": { "rpKind": "ours", "mode": "standard", "protocol": "rrdp+rsync" },
"right": { "rpKind": "rpki-client", "mode": "standard", "protocol": "rrdp+rsync" }
}
]
}

View File

@ -0,0 +1,431 @@
#!/usr/bin/env python3
import argparse
import hashlib
import json
import os
import platform
import subprocess
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Pinned TAL / trust-anchor fixture paths (relative to the fixture directory)
# for each supported RIR. The keys double as the set of valid RIR names
# accepted by parse_rirs().
RIR_FIXTURES = {
    "afrinic": {
        "tal": "tal/afrinic.tal",
        "ta": "ta/afrinic-ta.cer",
    },
    "apnic": {
        "tal": "tal/apnic-rfc7730-https.tal",
        "ta": "ta/apnic-ta.cer",
    },
    "arin": {
        "tal": "tal/arin.tal",
        "ta": "ta/arin-ta.cer",
    },
    "lacnic": {
        "tal": "tal/lacnic.tal",
        "ta": "ta/lacnic-ta.cer",
    },
    "ripe": {
        "tal": "tal/ripe-ncc.tal",
        "ta": "ta/ripe-ncc-ta.cer",
    },
}
def utc_now() -> str:
    """Current UTC time as an RFC 3339 timestamp with a trailing 'Z'."""
    now = datetime.now(timezone.utc)
    return now.isoformat().replace("+00:00", "Z")
def sha256_file(path: Path) -> str:
    """Hex-encoded SHA-256 digest of the file at `path`, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        while True:
            chunk = handle.read(1024 * 1024)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def read_tal_uris(path: Path) -> list[str]:
    """Collect the URI lines from the header of a TAL file.

    Lines starting with rsync://, https:// or http:// are gathered; the first
    blank, comment, or non-URI line that follows at least one collected URI
    (i.e. the start of the base64 key section) terminates collection.
    """
    collected: list[str] = []
    schemes = ("rsync://", "https://", "http://")
    with path.open("r", encoding="utf-8") as handle:
        for raw in handle:
            entry = raw.strip()
            if not entry or entry.startswith("#"):
                if collected:
                    break
                continue
            if entry.startswith(schemes):
                collected.append(entry)
            elif collected:
                break
    return collected
def first_uri(uris: list[str], prefixes: tuple[str, ...]) -> str | None:
for uri in uris:
if uri.startswith(prefixes):
return uri
return None
def parse_rirs(raw: str) -> list[str]:
    """Parse a comma-separated RIR list, validated against RIR_FIXTURES.

    Raises SystemExit on an empty list or any unknown RIR name.
    """
    names = [part.strip().lower() for part in raw.split(",") if part.strip()]
    if not names:
        raise SystemExit("RIR list must not be empty")
    unknown = [name for name in names if name not in RIR_FIXTURES]
    if unknown:
        raise SystemExit(
            f"invalid RIR(s): {','.join(unknown)}; allowed: {','.join(RIR_FIXTURES)}"
        )
    return names
def rel_or_abs(path: Path, root: Path | None) -> str:
path = path.resolve()
if root is not None:
try:
return path.relative_to(root.resolve()).as_posix()
except ValueError:
pass
return path.as_posix()
def git_commit(repo_root: Path) -> str | None:
try:
return subprocess.check_output(
["git", "-C", str(repo_root), "rev-parse", "--short", "HEAD"],
text=True,
stderr=subprocess.DEVNULL,
).strip()
except (subprocess.CalledProcessError, FileNotFoundError):
return None
def write_json(path: Path, value: dict[str, Any]) -> None:
    """Serialize `value` as pretty, UTF-8 JSON (trailing newline) to `path`,
    creating any missing parent directories."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(value, indent=2, ensure_ascii=False)
    path.write_text(rendered + "\n", encoding="utf-8")
def build_fixture_proof(
    fixture_dir: Path,
    rirs: list[str],
    repo_root: Path | None,
    ta_online_fetch_observed: bool,
) -> dict[str, Any]:
    """Build the fixture-proof document describing the pinned TAL/TA fixtures.

    For every requested RIR the TAL and TA certificate fixture files must exist
    under `fixture_dir`; their hashes, sizes, and header URIs are recorded so a
    run can prove exactly which trust anchors it used.

    Raises SystemExit if any fixture file is missing.
    """
    trust_anchors = []
    for rir in rirs:
        mapping = RIR_FIXTURES[rir]
        tal_path = fixture_dir / mapping["tal"]
        ta_path = fixture_dir / mapping["ta"]
        if not tal_path.is_file():
            raise SystemExit(f"missing TAL fixture for {rir}: {tal_path}")
        if not ta_path.is_file():
            raise SystemExit(f"missing TA fixture for {rir}: {ta_path}")
        tal_uris = read_tal_uris(tal_path)
        trust_anchors.append(
            {
                "rir": rir,
                "talPath": rel_or_abs(tal_path, repo_root),
                "taPath": rel_or_abs(ta_path, repo_root),
                # First HTTP(S) / rsync URI taken from the TAL header, if any.
                "talUri": first_uri(tal_uris, ("https://", "http://")),
                "taRsyncUri": first_uri(tal_uris, ("rsync://",)),
                "talSha256": sha256_file(tal_path),
                "taCertificateSha256": sha256_file(ta_path),
                "talBytes": tal_path.stat().st_size,
                "taCertificateBytes": ta_path.stat().st_size,
                # "Pinned" means the TA came from the fixture, not the network.
                "taFixturePinned": not ta_online_fetch_observed,
                "taOnlineFetchObserved": ta_online_fetch_observed,
            }
        )
    return {
        "schemaVersion": 1,
        "generatedBy": "feature035-experiment-driver",
        "generatedAtUtc": utc_now(),
        "fixtureDir": rel_or_abs(fixture_dir, repo_root),
        # True only when the run covers all five RIRs defined in RIR_FIXTURES.
        "all5": set(rirs) == set(RIR_FIXTURES),
        "rirs": rirs,
        "trustAnchors": trust_anchors,
    }
def parse_csv(raw: str) -> list[str]:
    """Split a comma-separated string into trimmed, non-empty items."""
    if not raw:
        return []
    stripped = (piece.strip() for piece in raw.split(","))
    return [piece for piece in stripped if piece]
def optional_path(raw: str | None, repo_root: Path | None) -> str | None:
    """Map an optional raw path string through rel_or_abs(); None passes through."""
    return None if raw is None else rel_or_abs(Path(raw), repo_root)
def build_run_meta(args: argparse.Namespace) -> dict[str, Any]:
    """Assemble the run-meta document for one RP run from CLI arguments.

    JSON-valued arguments (--argv-json, --env-json,
    --fixture-proof-summary-json) are decoded and shape-checked; the optional
    --fixture-proof file is inlined when it exists on disk.

    Raises SystemExit when a JSON argument decodes to the wrong shape.
    """
    rirs = parse_rirs(args.rirs)
    repo_root = Path(args.repo_root).resolve() if args.repo_root else None
    argv = json.loads(args.argv_json) if args.argv_json else []
    env_whitelist = json.loads(args.env_json) if args.env_json else {}
    fixture_proof_summary = (
        json.loads(args.fixture_proof_summary_json)
        if args.fixture_proof_summary_json
        else None
    )
    # Inline the full fixture-proof document only if the referenced file exists.
    fixture_proof = None
    if args.fixture_proof:
        fixture_proof_path = Path(args.fixture_proof)
        if fixture_proof_path.is_file():
            fixture_proof = json.loads(fixture_proof_path.read_text(encoding="utf-8"))
    if not isinstance(argv, list):
        raise SystemExit("--argv-json must decode to a JSON array")
    if not isinstance(env_whitelist, dict):
        raise SystemExit("--env-json must decode to a JSON object")
    if fixture_proof_summary is not None and not isinstance(fixture_proof_summary, dict):
        raise SystemExit("--fixture-proof-summary-json must decode to a JSON object")
    return {
        "schemaVersion": 1,
        "generatedBy": "feature035-experiment-driver",
        "generatedAtUtc": utc_now(),
        "experimentId": args.experiment_id,
        "side": args.side,
        "sideLabel": args.side_label,
        "step": args.step,
        "runId": args.run_id,
        # A replayed run is, by definition, not a live run.
        "liveRun": not args.replay_used,
        "replayUsed": args.replay_used,
        # Relying-party (RP) identity and configuration for this side.
        "rp": {
            "kind": args.rp_kind,
            "binary": args.rp_binary,
            "version": args.rp_version,
            "gitCommit": args.rp_git_commit,
            "mode": args.rp_mode,
            "protocolMode": args.protocol_mode,
            "strictPolicies": parse_csv(args.strict_policies),
        },
        "scope": {
            "rirs": rirs,
            "all5": set(rirs) == set(RIR_FIXTURES),
        },
        # Exact command invocation, for reproducibility.
        "command": {
            "argv": argv,
            "cwd": args.cwd,
            "envWhitelist": env_whitelist,
        },
        # On-disk state locations used by the run.
        "state": {
            "resetBeforeRun": args.reset_before_run,
            "stateRoot": args.state_root,
            "db": args.db,
            "repoBytesDb": args.repo_bytes_db,
            "rawStoreDb": args.raw_store_db,
            "rsyncMirrorRoot": args.rsync_mirror_root,
            "cacheRoot": args.cache_root,
        },
        # Paths (repo-relative when possible) to every produced artifact.
        "artifacts": {
            "ccr": optional_path(args.ccr, repo_root),
            "cir": optional_path(args.cir, repo_root),
            "runMeta": optional_path(str(args.out), repo_root),
            "fixtureProof": optional_path(args.fixture_proof, repo_root),
            "reportJson": optional_path(args.report_json, repo_root),
            "stageTimingJson": optional_path(args.stage_timing_json, repo_root),
            "stdoutLog": optional_path(args.stdout_log, repo_root),
            "stderrLog": optional_path(args.stderr_log, repo_root),
            "processTime": optional_path(args.process_time, repo_root),
            "vrpsCsv": optional_path(args.vrps_csv, repo_root),
            "vapsCsv": optional_path(args.vaps_csv, repo_root),
        },
        "fixtureProof": fixture_proof,
        "fixtureProofSummary": fixture_proof_summary,
        # Headline run metrics, passed in by the driver.
        "metrics": {
            "exitCode": args.exit_code,
            "wallMs": args.wall_ms,
            "maxRssKb": args.max_rss_kb,
            "vrps": args.vrps,
            "vaps": args.vaps,
            "publicationPoints": args.publication_points,
            "warnings": args.warnings,
            "cirObjectCount": args.cir_object_count,
            "cirRejectCount": args.cir_reject_count,
            "cirTrustAnchorCount": args.cir_trust_anchor_count,
            "ccrStateDigest": args.ccr_state_digest,
        },
        "environment": {
            "host": args.host or platform.node(),
            "platform": args.platform or platform.platform(),
        },
    }
def command_fixture_proof(args: argparse.Namespace) -> None:
    """CLI `fixture-proof`: write the fixture-proof JSON for the requested RIRs."""
    repo_root = Path(args.repo_root).resolve() if args.repo_root else None
    proof = build_fixture_proof(
        fixture_dir=Path(args.fixture_dir),
        rirs=parse_rirs(args.rirs),
        repo_root=repo_root,
        ta_online_fetch_observed=args.ta_online_fetch_observed,
    )
    write_json(Path(args.out), proof)


def command_run_meta(args: argparse.Namespace) -> None:
    """CLI `run-meta`: write a single run-meta JSON document built from `args`."""
    write_json(Path(args.out), build_run_meta(args))
def command_dry_run_bundle(args: argparse.Namespace) -> None:
    """CLI `dry-run-bundle`: emit a full artifact bundle with placeholder data.

    Produces a fixture proof plus one run-meta per side (A/left = "ours",
    B/right = "rpki-client") so downstream tooling can be exercised without
    running any relying-party software. All metrics are zeroed and all
    artifact paths point into `--out-dir` even though the files themselves
    are not created here.
    """
    out_dir = Path(args.out_dir)
    repo_root = Path(args.repo_root).resolve() if args.repo_root else Path.cwd().resolve()
    fixture_proof = out_dir / "fixture-proof.json"
    # Reuse the fixture-proof command by synthesizing its Namespace.
    command_fixture_proof(
        argparse.Namespace(
            fixture_dir=args.fixture_dir,
            rirs=args.rirs,
            repo_root=str(repo_root),
            out=str(fixture_proof),
            ta_online_fetch_observed=False,
        )
    )
    for side, side_label in (("left", "A"), ("right", "B")):
        run_dir = out_dir / side_label / "snapshot"
        meta_args = argparse.Namespace(
            out=run_dir / "run-meta.json",
            repo_root=str(repo_root),
            experiment_id=args.experiment_id,
            side=side,
            side_label=side_label,
            step="snapshot",
            run_id=f"{side_label}-snapshot-dry-run",
            replay_used=False,
            # Side A models our RP, side B models rpki-client.
            rp_kind="ours" if side_label == "A" else "rpki-client",
            rp_binary=f"bin/{'rpki' if side_label == 'A' else 'rpki-client'}",
            rp_version="dry-run",
            rp_git_commit=git_commit(repo_root),
            rp_mode="standard",
            protocol_mode="rrdp+rsync",
            strict_policies="",
            rirs=args.rirs,
            argv_json=json.dumps(["dry-run"]),
            env_json=json.dumps({"RPKI_PROGRESS_LOG": "1"}),
            cwd=str(out_dir),
            reset_before_run=True,
            state_root=str(out_dir / side_label / "state"),
            db=str(out_dir / side_label / "state" / "work-db"),
            repo_bytes_db=str(out_dir / side_label / "state" / "repo-bytes.db"),
            raw_store_db=str(out_dir / side_label / "state" / "raw-store.db"),
            rsync_mirror_root=str(out_dir / side_label / "state" / "rsync-mirror"),
            cache_root=str(out_dir / side_label / "state" / "cache"),
            ccr=str(run_dir / "result.ccr"),
            cir=str(run_dir / "result.cir"),
            fixture_proof=str(fixture_proof),
            report_json=str(run_dir / "report.json"),
            stage_timing_json=str(run_dir / "stage-timing.json"),
            stdout_log=str(run_dir / "stdout.log"),
            stderr_log=str(run_dir / "stderr.log"),
            process_time=str(run_dir / "process-time.txt"),
            vrps_csv=str(run_dir / "vrps.csv"),
            vaps_csv=str(run_dir / "vaps.csv"),
            # Placeholder metrics: a dry run produces no real measurements.
            exit_code=0,
            wall_ms=0,
            max_rss_kb=0,
            vrps=0,
            vaps=0,
            publication_points=0,
            warnings=0,
            cir_object_count=0,
            cir_reject_count=0,
            cir_trust_anchor_count=len(parse_rirs(args.rirs)),
            ccr_state_digest=None,
            fixture_proof_summary_json=json.dumps(
                {
                    "taFixturePinned": True,
                    "taOnlineFetchObserved": False,
                    "trustAnchorCount": len(parse_rirs(args.rirs)),
                }
            ),
        )
        command_run_meta(meta_args)
def add_run_meta_args(parser: argparse.ArgumentParser) -> None:
    """Register every `run-meta` CLI flag on `parser`.

    Mirrors the fields consumed by build_run_meta(); keep the two in sync.
    """
    # Identity of the run.
    parser.add_argument("--out", required=True)
    parser.add_argument("--repo-root")
    parser.add_argument("--experiment-id", required=True)
    parser.add_argument("--side", choices=["left", "right"], required=True)
    parser.add_argument("--side-label", choices=["A", "B"], required=True)
    parser.add_argument("--step", choices=["snapshot", "delta"], required=True)
    parser.add_argument("--run-id", required=True)
    parser.add_argument("--replay-used", action="store_true")
    # Relying-party configuration.
    parser.add_argument("--rp-kind", required=True)
    parser.add_argument("--rp-binary", required=True)
    parser.add_argument("--rp-version")
    parser.add_argument("--rp-git-commit")
    parser.add_argument("--rp-mode", default="standard")
    parser.add_argument("--protocol-mode", default="rrdp+rsync")
    parser.add_argument("--strict-policies", default="")
    parser.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe")
    # Command provenance (JSON-encoded values).
    parser.add_argument("--argv-json")
    parser.add_argument("--env-json")
    parser.add_argument("--cwd", default=os.getcwd())
    # State locations.
    parser.add_argument("--reset-before-run", action="store_true")
    parser.add_argument("--state-root")
    parser.add_argument("--db")
    parser.add_argument("--repo-bytes-db")
    parser.add_argument("--raw-store-db")
    parser.add_argument("--rsync-mirror-root")
    parser.add_argument("--cache-root")
    # Artifact paths.
    parser.add_argument("--ccr")
    parser.add_argument("--cir")
    parser.add_argument("--fixture-proof")
    parser.add_argument("--fixture-proof-summary-json")
    parser.add_argument("--report-json")
    parser.add_argument("--stage-timing-json")
    parser.add_argument("--stdout-log")
    parser.add_argument("--stderr-log")
    parser.add_argument("--process-time")
    parser.add_argument("--vrps-csv")
    parser.add_argument("--vaps-csv")
    # Run metrics.
    parser.add_argument("--exit-code", type=int)
    parser.add_argument("--wall-ms", type=int)
    parser.add_argument("--max-rss-kb", type=int)
    parser.add_argument("--vrps", type=int)
    parser.add_argument("--vaps", type=int)
    parser.add_argument("--publication-points", type=int)
    parser.add_argument("--warnings", type=int)
    parser.add_argument("--cir-object-count", type=int)
    parser.add_argument("--cir-reject-count", type=int)
    parser.add_argument("--cir-trust-anchor-count", type=int)
    parser.add_argument("--ccr-state-digest")
    # Environment overrides (default to platform.node()/platform.platform()).
    parser.add_argument("--host")
    parser.add_argument("--platform")
def main() -> None:
    """CLI entry point: dispatch to fixture-proof / run-meta / dry-run-bundle."""
    parser = argparse.ArgumentParser(description="Feature #035 CCR/CIR experiment bundle helpers")
    subparsers = parser.add_subparsers(dest="command", required=True)
    fixture = subparsers.add_parser("fixture-proof")
    fixture.add_argument("--fixture-dir", default="tests/fixtures")
    fixture.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe")
    fixture.add_argument("--repo-root")
    fixture.add_argument("--out", required=True)
    fixture.add_argument("--ta-online-fetch-observed", action="store_true")
    fixture.set_defaults(func=command_fixture_proof)
    run_meta = subparsers.add_parser("run-meta")
    add_run_meta_args(run_meta)
    run_meta.set_defaults(func=command_run_meta)
    dry_run = subparsers.add_parser("dry-run-bundle")
    dry_run.add_argument("--out-dir", required=True)
    dry_run.add_argument("--repo-root", default=".")
    dry_run.add_argument("--fixture-dir", default="tests/fixtures")
    dry_run.add_argument("--rirs", default="afrinic,apnic,arin,lacnic,ripe")
    dry_run.add_argument("--experiment-id", default="m2-dry-run")
    dry_run.set_defaults(func=command_dry_run_bundle)
    args = parser.parse_args()
    # Each subparser installs its handler via set_defaults(func=...).
    args.func(args)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,10 @@
{
"schemaVersion": 1,
"rirs": {
"afrinic": { "tal": "tal/afrinic.tal", "ta": "ta/afrinic-ta.cer" },
"apnic": { "tal": "tal/apnic-rfc7730-https.tal", "ta": "ta/apnic-ta.cer" },
"arin": { "tal": "tal/arin.tal", "ta": "ta/arin-ta.cer" },
"lacnic": { "tal": "tal/lacnic.tal", "ta": "ta/lacnic-ta.cer" },
"ripe": { "tal": "tal/ripe-ncc.tal", "ta": "ta/ripe-ncc-ta.cer" }
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,595 @@
use std::net::{Ipv4Addr, Ipv6Addr};
use std::path::{Path, PathBuf};
use rpki::data_model::aspa::AspaObject;
use rpki::data_model::crl::RpkixCrl;
use rpki::data_model::manifest::ManifestObject;
use rpki::data_model::rc::{
AccessDescription, RcExtensions, ResourceCertificate, SubjectInfoAccess,
};
use rpki::data_model::roa::{IpPrefix as RoaIpPrefix, RoaAfi, RoaObject};
use rpki::data_model::signed_object::{
ResourceEeCertificate, RpkiSignedObject, SignedAttrsProfiled, SignerInfoProfiled,
};
use rpki::data_model::ta::TaCertificate;
use serde_json::{Value, json};
use sha2::{Digest, Sha256};
/// RPKI object kinds the tool can parse; `Auto` means "infer from the
/// input file's extension" and is resolved before parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ObjectType {
    Auto,
    Cer,
    Mft,
    Crl,
    Roa,
    Aspa,
}

/// Parsed command-line options.
#[derive(Debug, PartialEq, Eq)]
struct Args {
    object_type: ObjectType,     // --type (defaults to Auto)
    input_path: Option<PathBuf>, // --input / --in (required; validated in parse_args)
    out_path: Option<PathBuf>,   // --out; output goes to stdout when absent
    pretty: bool,                // --pretty / --compact
    entry_limit: usize,          // --entry-limit; usize::MAX means "all"
}

impl Default for Args {
    fn default() -> Self {
        Self {
            object_type: ObjectType::Auto,
            input_path: None,
            out_path: None,
            pretty: true,
            entry_limit: 50,
        }
    }
}
/// One-line usage banner shown on `--help` and on argument errors.
fn usage() -> &'static str {
    const TEXT: &str = "Usage: rpki_object_parse --type auto|cer|mft|crl|roa|asa|aspa --input <object-file> [--out <parsed.json>] [--entry-limit <n|all>] [--compact]";
    TEXT
}
/// Entry point: run `real_main` and map any error message to exit code 1.
/// NOTE(review): `-h`/`--help` also travels down the error path, so help
/// currently exits non-zero as well.
fn main() {
    match real_main() {
        Ok(()) => {}
        Err(message) => {
            eprintln!("{message}");
            std::process::exit(1);
        }
    }
}
/// Parse CLI arguments, read and parse the input object, then emit the JSON
/// report either to `--out` (creating parent directories) or to stdout.
fn real_main() -> Result<(), String> {
    let args = parse_args(&std::env::args().collect::<Vec<_>>())?;
    // parse_args guarantees input_path is Some, so expect() cannot fire here.
    let input_path = args.input_path.as_ref().expect("validated");
    let bytes = std::fs::read(input_path)
        .map_err(|e| format!("read input failed: {}: {e}", input_path.display()))?;
    // `Auto` is resolved from the file extension before dispatching.
    let object_type = resolve_object_type(args.object_type, input_path)?;
    let parsed = parse_object_json(object_type, input_path, &bytes, args.entry_limit);
    let rendered = if args.pretty {
        serde_json::to_string_pretty(&parsed).map_err(|e| e.to_string())?
    } else {
        serde_json::to_string(&parsed).map_err(|e| e.to_string())?
    };
    if let Some(out_path) = args.out_path.as_ref() {
        if let Some(parent) = out_path.parent() {
            std::fs::create_dir_all(parent)
                .map_err(|e| format!("create output parent failed: {}: {e}", parent.display()))?;
        }
        std::fs::write(out_path, rendered)
            .map_err(|e| format!("write output failed: {}: {e}", out_path.display()))?;
    } else {
        println!("{rendered}");
    }
    Ok(())
}
/// Parse `argv` (program name at index 0) into `Args`.
///
/// Unknown flags and a missing `--input` are errors; `-h`/`--help` is
/// reported through the error channel carrying the usage text.
fn parse_args(argv: &[String]) -> Result<Args, String> {
    let mut parsed = Args::default();
    let mut words = argv.iter().skip(1);
    while let Some(word) = words.next() {
        match word.as_str() {
            "--type" => {
                let value = words.next().ok_or("--type requires a value")?;
                parsed.object_type = parse_object_type(value)?;
            }
            "--input" | "--in" => {
                let value = words.next().ok_or("--input requires a value")?;
                parsed.input_path = Some(PathBuf::from(value));
            }
            "--out" => {
                let value = words.next().ok_or("--out requires a value")?;
                parsed.out_path = Some(PathBuf::from(value));
            }
            "--entry-limit" => {
                let value = words.next().ok_or("--entry-limit requires a value")?;
                parsed.entry_limit = parse_limit(value)?;
            }
            "--all" => parsed.entry_limit = usize::MAX,
            "--compact" => parsed.pretty = false,
            "--pretty" => parsed.pretty = true,
            "-h" | "--help" => return Err(usage().to_string()),
            other => return Err(format!("unknown argument: {other}\n{}", usage())),
        }
    }
    if parsed.input_path.is_none() {
        return Err(format!("--input is required\n{}", usage()));
    }
    Ok(parsed)
}
/// Map a user-supplied `--type` value onto an `ObjectType`. Matching is
/// case-insensitive and tolerates dot-prefixed extensions and long aliases.
fn parse_object_type(value: &str) -> Result<ObjectType, String> {
    let lowered = value.to_ascii_lowercase();
    let mapped = match lowered.as_str() {
        "auto" => Some(ObjectType::Auto),
        "cer" | ".cer" | "cert" | "certificate" => Some(ObjectType::Cer),
        "mft" | ".mft" | "manifest" => Some(ObjectType::Mft),
        "crl" | ".crl" => Some(ObjectType::Crl),
        "roa" | ".roa" => Some(ObjectType::Roa),
        "asa" | ".asa" | "aspa" => Some(ObjectType::Aspa),
        _ => None,
    };
    mapped.ok_or_else(|| format!("unsupported --type: {value}\n{}", usage()))
}
/// Parse `--entry-limit`: the literal "all" (any case) means unlimited
/// (`usize::MAX`); anything else must be a non-negative integer.
fn parse_limit(value: &str) -> Result<usize, String> {
    if value.eq_ignore_ascii_case("all") {
        Ok(usize::MAX)
    } else {
        value
            .parse::<usize>()
            .map_err(|_| format!("invalid --entry-limit: {value}"))
    }
}
/// Resolve `Auto` by inspecting the file extension (case-insensitive);
/// explicit types pass straight through. Unknown extensions are an error.
fn resolve_object_type(object_type: ObjectType, path: &Path) -> Result<ObjectType, String> {
    if object_type != ObjectType::Auto {
        return Ok(object_type);
    }
    let extension = path
        .extension()
        .and_then(|ext| ext.to_str())
        .map(|ext| ext.to_ascii_lowercase());
    match extension.as_deref() {
        Some("cer") => Ok(ObjectType::Cer),
        Some("mft") => Ok(ObjectType::Mft),
        Some("crl") => Ok(ObjectType::Crl),
        Some("roa") => Ok(ObjectType::Roa),
        Some("asa" | "aspa") => Ok(ObjectType::Aspa),
        _ => Err(format!(
            "cannot infer object type from path: {}",
            path.display()
        )),
    }
}
/// Dispatch to the type-specific parser and wrap the result in the top-level
/// report envelope (tool name, schema version, input summary).
fn parse_object_json(
    object_type: ObjectType,
    input_path: &Path,
    bytes: &[u8],
    entry_limit: usize,
) -> Value {
    let object = match object_type {
        // resolve_object_type() runs before this, so Auto cannot reach here.
        ObjectType::Auto => unreachable!("auto must be resolved"),
        ObjectType::Cer => parse_cer_json(bytes),
        ObjectType::Mft => parse_mft_json(bytes, entry_limit),
        ObjectType::Crl => parse_crl_json(bytes, entry_limit),
        ObjectType::Roa => parse_roa_json(bytes, entry_limit),
        ObjectType::Aspa => parse_aspa_json(bytes, entry_limit),
    };
    json!({
        "tool": "rpki_object_parse",
        "schemaVersion": 1,
        "input": {
            "path": input_path.display().to_string(),
            "type": object_type_label(object_type),
            "bytes": bytes_summary(bytes),
        },
        "object": object,
    })
}
/// Parse a resource certificate (.cer). On success, additionally probe the
/// stricter trust-anchor certificate profile and report whether it applies
/// (including the TA self-signature check).
fn parse_cer_json(bytes: &[u8]) -> Value {
    match ResourceCertificate::decode_der(bytes) {
        Ok(cert) => {
            // A .cer may also satisfy the TA profile; report that separately.
            let ta_profile = match TaCertificate::decode_der(bytes) {
                Ok(ta) => json!({
                    "valid": true,
                    "selfSignature": result_json(ta.verify_self_signature().map_err(|e| e.to_string())),
                }),
                Err(err) => json!({
                    "valid": false,
                    "error": err.to_string(),
                }),
            };
            json!({
                "type": "cer",
                "decode": {"profileValid": true},
                "resourceCertificate": resource_certificate_json(&cert),
                "trustAnchorProfile": ta_profile,
            })
        }
        Err(err) => json!({
            "type": "cer",
            "decode": {"profileValid": false, "error": err.to_string()},
        }),
    }
}

/// Parse a manifest (.mft): decode the signed object, list up to
/// `entry_limit` file entries, and run the embedded-EE and CMS signature
/// checks. A decode failure yields a `profileValid: false` record instead.
fn parse_mft_json(bytes: &[u8], entry_limit: usize) -> Value {
    match ManifestObject::decode_der(bytes) {
        Ok(mft) => {
            let files = mft.manifest.parse_files();
            // Either a truncated entry listing, or the parse error — never both.
            let (file_sample, file_list_error) = match files {
                Ok(entries) => (
                    json!({
                        "count": entries.len(),
                        "truncated": entries.len() > entry_limit,
                        "entries": entries.iter().take(entry_limit).map(|item| {
                            json!({"fileName": item.file_name, "hashHex": hex::encode(item.hash_bytes)})
                        }).collect::<Vec<_>>(),
                    }),
                    Value::Null,
                ),
                Err(err) => (Value::Null, json!(err.to_string())),
            };
            json!({
                "type": "mft",
                "decode": {"profileValid": true},
                "eContentType": mft.econtent_type,
                "signedObject": signed_object_json(&mft.signed_object),
                "manifest": {
                    "version": mft.manifest.version,
                    "manifestNumberHex": mft.manifest.manifest_number.to_hex_upper(),
                    "thisUpdate": format_time(mft.manifest.this_update),
                    "nextUpdate": format_time(mft.manifest.next_update),
                    "fileHashAlg": mft.manifest.file_hash_alg,
                    "fileCount": mft.manifest.file_count(),
                    "fileList": file_sample,
                    "fileListError": file_list_error,
                },
                "embeddedEeProfile": result_json(mft.validate_embedded_ee_cert().map_err(|e| e.to_string())),
                "cmsSignature": result_json(mft.signed_object.verify_signature().map_err(|e| e.to_string())),
            })
        }
        Err(err) => json!({
            "type": "mft",
            "decode": {"profileValid": false, "error": err.to_string()},
        }),
    }
}

/// Parse a CRL (.crl), listing up to `entry_limit` revoked serials.
fn parse_crl_json(bytes: &[u8], entry_limit: usize) -> Value {
    match RpkixCrl::decode_der(bytes) {
        Ok(crl) => json!({
            "type": "crl",
            "decode": {"profileValid": true},
            "rawDer": bytes_summary(&crl.raw_der),
            "version": crl.version,
            "issuer": crl.issuer_dn,
            "signatureAlgorithm": crl.signature_algorithm_oid,
            "thisUpdate": format_time(crl.this_update.utc),
            "nextUpdate": format_time(crl.next_update.utc),
            "extensions": {
                "authorityKeyIdentifier": hex::encode(&crl.extensions.authority_key_identifier),
                "crlNumberHex": crl.extensions.crl_number.to_hex_upper(),
                "crlNumber": crl.extensions.crl_number.to_u64(),
            },
            "revokedCertificates": {
                "count": crl.revoked_certs.len(),
                "truncated": crl.revoked_certs.len() > entry_limit,
                "entries": crl.revoked_certs.iter().take(entry_limit).map(|item| {
                    json!({
                        "serialNumberHex": item.serial_number.to_hex_upper(),
                        "serialNumber": item.serial_number.to_u64(),
                        "revocationDate": format_time(item.revocation_date.utc),
                    })
                }).collect::<Vec<_>>(),
            },
        }),
        Err(err) => json!({
            "type": "crl",
            "decode": {"profileValid": false, "error": err.to_string()},
        }),
    }
}

/// Parse a ROA (.roa), listing up to `entry_limit` prefixes per address
/// family, plus embedded-EE and CMS signature check results.
fn parse_roa_json(bytes: &[u8], entry_limit: usize) -> Value {
    match RoaObject::decode_der(bytes) {
        Ok(roa) => json!({
            "type": "roa",
            "decode": {"profileValid": true},
            "eContentType": roa.econtent_type,
            "signedObject": signed_object_json(&roa.signed_object),
            "roa": {
                "version": roa.roa.version,
                "asId": roa.roa.as_id,
                "ipAddressFamilies": roa.roa.ip_addr_blocks.iter().map(|family| {
                    json!({
                        "afi": format!("{:?}", family.afi),
                        "addressCount": family.addresses.len(),
                        // The truncation limit applies per address family.
                        "truncated": family.addresses.len() > entry_limit,
                        "addresses": family.addresses.iter().take(entry_limit).map(|entry| {
                            json!({
                                "prefix": roa_prefix_string(&entry.prefix),
                                "maxLength": entry.max_length,
                            })
                        }).collect::<Vec<_>>(),
                    })
                }).collect::<Vec<_>>(),
            },
            "embeddedEeProfile": result_json(roa.validate_embedded_ee_cert().map_err(|e| e.to_string())),
            "cmsSignature": result_json(roa.signed_object.verify_signature().map_err(|e| e.to_string())),
        }),
        Err(err) => json!({
            "type": "roa",
            "decode": {"profileValid": false, "error": err.to_string()},
        }),
    }
}

/// Parse an ASPA (.asa), listing up to `entry_limit` provider AS IDs,
/// plus embedded-EE and CMS signature check results.
fn parse_aspa_json(bytes: &[u8], entry_limit: usize) -> Value {
    match AspaObject::decode_der(bytes) {
        Ok(aspa) => json!({
            "type": "aspa",
            "decode": {"profileValid": true},
            "eContentType": aspa.econtent_type,
            "signedObject": signed_object_json(&aspa.signed_object),
            "aspa": {
                "version": aspa.aspa.version,
                "customerAsId": aspa.aspa.customer_as_id,
                "providerCount": aspa.aspa.provider_as_ids.len(),
                "providersTruncated": aspa.aspa.provider_as_ids.len() > entry_limit,
                "providerAsIds": aspa.aspa.provider_as_ids.iter().take(entry_limit).copied().collect::<Vec<_>>(),
            },
            "embeddedEeProfile": result_json(aspa.validate_embedded_ee_cert().map_err(|e| e.to_string())),
            "cmsSignature": result_json(aspa.signed_object.verify_signature().map_err(|e| e.to_string())),
        }),
        Err(err) => json!({
            "type": "aspa",
            "decode": {"profileValid": false, "error": err.to_string()},
        }),
    }
}
/// JSON view of a resource certificate's TBS fields and extensions.
fn resource_certificate_json(cert: &ResourceCertificate) -> Value {
    let tbs = &cert.tbs;
    json!({
        "rawDer": bytes_summary(&cert.raw_der),
        "kind": format!("{:?}", cert.kind),
        "version": tbs.version,
        "serialNumberHex": hex::encode(tbs.serial_number.to_bytes_be()),
        "signatureAlgorithm": tbs.signature_algorithm,
        "issuer": tbs.issuer_name.to_string(),
        "subject": tbs.subject_name.to_string(),
        "validity": {
            "notBefore": format_time(tbs.validity_not_before),
            "notAfter": format_time(tbs.validity_not_after),
        },
        "subjectPublicKeyInfo": bytes_summary(&tbs.subject_public_key_info),
        "extensions": rc_extensions_json(&tbs.extensions),
    })
}

/// JSON view of the RFC 6487-profiled certificate extensions.
fn rc_extensions_json(ext: &RcExtensions) -> Value {
    json!({
        "basicConstraintsCa": ext.basic_constraints_ca,
        "subjectKeyIdentifier": ext.subject_key_identifier.as_ref().map(|v| hex::encode(v)),
        "authorityKeyIdentifier": ext.authority_key_identifier.as_ref().map(|v| hex::encode(v)),
        "crlDistributionPointsUris": ext.crl_distribution_points_uris,
        "caIssuersUris": ext.ca_issuers_uris,
        "subjectInfoAccess": subject_info_access_json(ext.subject_info_access.as_ref()),
        "certificatePoliciesOid": ext.certificate_policies_oid,
        // Serialized via serde; Null signals a serialization failure.
        "ipResources": serde_json::to_value(&ext.ip_resources).unwrap_or(Value::Null),
        "asResources": serde_json::to_value(&ext.as_resources).unwrap_or(Value::Null),
    })
}

/// JSON view of a Subject Information Access extension, distinguishing the
/// CA and EE variants; Null when the extension is absent.
fn subject_info_access_json(value: Option<&SubjectInfoAccess>) -> Value {
    match value {
        None => Value::Null,
        Some(SubjectInfoAccess::Ca(ca)) => json!({
            "kind": "ca",
            "accessDescriptions": ca.access_descriptions.iter().map(access_description_json).collect::<Vec<_>>(),
        }),
        Some(SubjectInfoAccess::Ee(ee)) => json!({
            "kind": "ee",
            "signedObjectUris": ee.signed_object_uris,
            "accessDescriptions": ee.access_descriptions.iter().map(access_description_json).collect::<Vec<_>>(),
        }),
    }
}

/// JSON view of a single SIA/AIA AccessDescription.
fn access_description_json(value: &AccessDescription) -> Value {
    json!({
        "accessMethodOid": value.access_method_oid,
        "accessLocation": value.access_location,
    })
}

/// JSON view of a CMS signed object: ContentInfo wrapper plus SignedData
/// (digest algorithms, encapsulated content, EE certs, signer infos).
fn signed_object_json(signed_object: &RpkiSignedObject) -> Value {
    let signed_data = &signed_object.signed_data;
    json!({
        "rawDer": bytes_summary(&signed_object.raw_der),
        "contentInfoContentType": signed_object.content_info_content_type,
        "signedData": {
            "version": signed_data.version,
            "digestAlgorithms": signed_data.digest_algorithms,
            "encapContentInfo": {
                "eContentType": signed_data.encap_content_info.econtent_type,
                "eContent": bytes_summary(&signed_data.encap_content_info.econtent),
            },
            "certificates": signed_data.certificates.iter().map(ee_certificate_json).collect::<Vec<_>>(),
            "crlsPresent": signed_data.crls_present,
            "signerInfos": signed_data.signer_infos.iter().map(signer_info_json).collect::<Vec<_>>(),
        },
    })
}

/// JSON view of an embedded end-entity certificate, including its RSA key
/// material and the nested resource-certificate view.
fn ee_certificate_json(cert: &ResourceEeCertificate) -> Value {
    json!({
        "rawDer": bytes_summary(&cert.raw_der),
        "subjectKeyIdentifier": hex::encode(&cert.subject_key_identifier),
        "spkiDer": bytes_summary(&cert.spki_der),
        "rsaPublicKey": {
            "modulus": bytes_summary(&cert.rsa_public_modulus),
            "exponent": bytes_summary(&cert.rsa_public_exponent),
        },
        "tbsCertificate": bytes_summary(&cert.tbs_certificate_der),
        "certificateSignature": bytes_summary(&cert.signature_bytes),
        "keyUsageSummary": format!("{:?}", cert.key_usage_summary),
        "siaSignedObjectUris": cert.sia_signed_object_uris,
        "resourceCertificate": resource_certificate_json(&cert.resource_cert),
    })
}

/// JSON view of a CMS SignerInfo, including the exact DER that the
/// signature covers (useful for byte-level diffing between RPs).
fn signer_info_json(info: &SignerInfoProfiled) -> Value {
    json!({
        "version": info.version,
        "sidSki": hex::encode(&info.sid_ski),
        "digestAlgorithm": info.digest_algorithm,
        "signatureAlgorithm": info.signature_algorithm,
        "signedAttrs": signed_attrs_json(&info.signed_attrs),
        "unsignedAttrsPresent": info.unsigned_attrs_present,
        "signature": bytes_summary(&info.signature),
        "signedAttrsDerForSignature": bytes_summary(&info.signed_attrs_der_for_signature),
    })
}

/// JSON view of the profiled CMS signed attributes.
fn signed_attrs_json(attrs: &SignedAttrsProfiled) -> Value {
    json!({
        "contentType": attrs.content_type,
        "messageDigest": hex::encode(&attrs.message_digest),
        "signingTime": {
            "utc": format_time(attrs.signing_time.utc),
            "encoding": format!("{:?}", attrs.signing_time.encoding),
        },
        "otherAttrsPresent": attrs.other_attrs_present,
    })
}

/// Render a `Result<(), String>` as `{"valid": true}` or
/// `{"valid": false, "error": ...}`.
fn result_json(result: Result<(), String>) -> Value {
    match result {
        Ok(()) => json!({"valid": true}),
        Err(err) => json!({"valid": false, "error": err}),
    }
}
fn object_type_label(object_type: ObjectType) -> &'static str {
match object_type {
ObjectType::Auto => "auto",
ObjectType::Cer => "cer",
ObjectType::Mft => "mft",
ObjectType::Crl => "crl",
ObjectType::Roa => "roa",
ObjectType::Aspa => "aspa",
}
}
/// Compact summary of a byte string: length, SHA-256, and hex of the first
/// and last 16 bytes (head and tail overlap for inputs shorter than 32 bytes).
fn bytes_summary(bytes: &[u8]) -> Value {
    let head_len = bytes.len().min(16);
    let tail_len = bytes.len().min(16);
    json!({
        "len": bytes.len(),
        "sha256": sha256_hex(bytes),
        "headHex": hex::encode(&bytes[..head_len]),
        "tailHex": hex::encode(&bytes[bytes.len().saturating_sub(tail_len)..]),
    })
}

/// Lowercase hex SHA-256 digest of `bytes`.
fn sha256_hex(bytes: &[u8]) -> String {
    hex::encode(Sha256::digest(bytes))
}

/// Format a timestamp as RFC 3339 in UTC; fall back to the raw Unix
/// timestamp string if formatting fails.
fn format_time(value: time::OffsetDateTime) -> String {
    value
        .to_offset(time::UtcOffset::UTC)
        .format(&time::format_description::well_known::Rfc3339)
        .unwrap_or_else(|_| value.unix_timestamp().to_string())
}
/// Render a ROA prefix as "addr/len" text.
/// NOTE(review): assumes `addr_bytes()` yields at least 4 bytes for IPv4 and
/// exactly 16 for IPv6 (the copy_from_slice panics otherwise) — confirm
/// against the `RoaIpPrefix` contract in the data model.
fn roa_prefix_string(prefix: &RoaIpPrefix) -> String {
    let bytes = prefix.addr_bytes();
    match prefix.afi {
        RoaAfi::Ipv4 => {
            let octets = [bytes[0], bytes[1], bytes[2], bytes[3]];
            format!("{}/{}", Ipv4Addr::from(octets), prefix.prefix_len)
        }
        RoaAfi::Ipv6 => {
            let mut octets = [0u8; 16];
            octets.copy_from_slice(bytes);
            format!("{}/{}", Ipv6Addr::from(octets), prefix.prefix_len)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Build an owned argv vector from string literals, so each test does
    /// not repeat `.to_string()` per argument.
    fn argv(parts: &[&str]) -> Vec<String> {
        parts.iter().map(|s| s.to_string()).collect()
    }

    #[test]
    fn parse_args_accepts_plan_shape() {
        let args = parse_args(&argv(&[
            "rpki_object_parse",
            "--type",
            "auto",
            "--input",
            "a.roa",
            "--out",
            "parsed.json",
            "--entry-limit",
            "5",
        ]))
        .expect("parse args");
        assert_eq!(args.object_type, ObjectType::Auto);
        assert_eq!(args.input_path.as_deref(), Some(Path::new("a.roa")));
        assert_eq!(args.out_path.as_deref(), Some(Path::new("parsed.json")));
        assert_eq!(args.entry_limit, 5);
    }

    #[test]
    fn parse_args_accepts_aspa_alias_and_all_limit() {
        // `--in` aliases `--input`, "asa" aliases the ASPA type, and
        // `--entry-limit all` maps to usize::MAX; `--compact` disables pretty.
        let args = parse_args(&argv(&[
            "rpki_object_parse",
            "--type",
            "asa",
            "--in",
            "a.asa",
            "--entry-limit",
            "all",
            "--compact",
        ]))
        .expect("parse args");
        assert_eq!(args.object_type, ObjectType::Aspa);
        assert_eq!(args.entry_limit, usize::MAX);
        assert!(!args.pretty);
    }

    #[test]
    fn resolve_auto_type_from_extension() {
        let mft = resolve_object_type(ObjectType::Auto, Path::new("a.mft")).expect("resolve");
        assert_eq!(mft, ObjectType::Mft);
        let aspa = resolve_object_type(ObjectType::Auto, Path::new("a.asa")).expect("resolve");
        assert_eq!(aspa, ObjectType::Aspa);
    }

    #[test]
    fn unknown_auto_type_is_rejected() {
        let err = resolve_object_type(ObjectType::Auto, Path::new("a.bin")).unwrap_err();
        assert!(err.contains("cannot infer object type"), "{err}");
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -189,12 +189,55 @@ pub fn process_publication_point_for_issuer_with_options<P: PublicationPointData
let mut audit: Vec<ObjectAuditEntry> = Vec::new();
// Enforce that `manifest_bytes` is actually a manifest object.
let _manifest = ManifestObject::decode_der_with_strict_options(
let _manifest = match ManifestObject::decode_der_with_strict_options(
manifest_bytes,
policy.strict.cms_der,
policy.strict.name,
)
.expect("publication point snapshot manifest decodes");
) {
Ok(manifest) => manifest,
Err(e) => {
stats.publication_point_dropped = true;
warnings.push(
Warning::new(format!(
"dropping publication point: manifest decode failed: {e}"
))
.with_rfc_refs(&[
RfcRef("RFC 9286 §4"),
RfcRef("RFC 9286 §6.2"),
RfcRef("RFC 9286 §6.6"),
])
.with_context(manifest_rsync_uri),
);
for f in locked_files {
if f.rsync_uri.ends_with(".roa") {
audit.push(ObjectAuditEntry {
rsync_uri: f.rsync_uri.clone(),
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Roa,
result: AuditObjectResult::Skipped,
detail: Some("skipped: manifest decode failed".to_string()),
});
} else if f.rsync_uri.ends_with(".asa") {
audit.push(ObjectAuditEntry {
rsync_uri: f.rsync_uri.clone(),
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Aspa,
result: AuditObjectResult::Skipped,
detail: Some("skipped: manifest decode failed".to_string()),
});
}
}
return ObjectsOutput {
vrps: Vec::new(),
aspas: Vec::new(),
router_keys: Vec::new(),
local_outputs_cache: Vec::new(),
warnings,
stats,
audit,
};
}
};
// Decode issuer CA once; if it fails we cannot validate ROA/ASPA EE certificates.
let issuer_ca = match decode_resource_certificate_with_policy(issuer_ca_der, policy) {
@ -978,12 +1021,55 @@ pub(crate) fn prepare_publication_point_for_parallel_roa<P: PublicationPointData
.count();
let mut audit: Vec<ObjectAuditEntry> = Vec::new();
let _manifest = ManifestObject::decode_der_with_strict_options(
let _manifest = match ManifestObject::decode_der_with_strict_options(
manifest_bytes,
policy.strict.cms_der,
policy.strict.name,
)
.expect("publication point snapshot manifest decodes");
) {
Ok(manifest) => manifest,
Err(e) => {
stats.publication_point_dropped = true;
warnings.push(
Warning::new(format!(
"dropping publication point: manifest decode failed: {e}"
))
.with_rfc_refs(&[
RfcRef("RFC 9286 §4"),
RfcRef("RFC 9286 §6.2"),
RfcRef("RFC 9286 §6.6"),
])
.with_context(manifest_rsync_uri),
);
for f in locked_files {
if f.rsync_uri.ends_with(".roa") {
audit.push(ObjectAuditEntry {
rsync_uri: f.rsync_uri.clone(),
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Roa,
result: AuditObjectResult::Skipped,
detail: Some("skipped: manifest decode failed".to_string()),
});
} else if f.rsync_uri.ends_with(".asa") {
audit.push(ObjectAuditEntry {
rsync_uri: f.rsync_uri.clone(),
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Aspa,
result: AuditObjectResult::Skipped,
detail: Some("skipped: manifest decode failed".to_string()),
});
}
}
return ParallelObjectsPrepare::Complete(ObjectsOutput {
vrps: Vec::new(),
aspas: Vec::new(),
router_keys: Vec::new(),
local_outputs_cache: Vec::new(),
warnings,
stats,
audit,
});
}
};
let issuer_ca = match decode_resource_certificate_with_policy(issuer_ca_der, policy) {
Ok(v) => v,
@ -2403,7 +2489,11 @@ mod tests {
Afi, AsIdOrRange, AsIdentifierChoice, IpAddressFamily, IpAddressOrRange, IpAddressRange,
IpPrefix, IpResourceSet,
};
use crate::policy::Policy;
use crate::storage::PackTime;
use crate::validation::publication_point::PublicationPointSnapshot;
use std::collections::HashMap;
use time::OffsetDateTime;
fn fixture_bytes(path: &str) -> Vec<u8> {
std::fs::read(std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(path))
@ -2874,4 +2964,41 @@ mod tests {
assert_eq!(roa_afi_to_string(RoaAfi::Ipv4), "ipv4");
assert_eq!(roa_afi_to_string(RoaAfi::Ipv6), "ipv6");
}
#[test]
fn strict_name_manifest_decode_failure_drops_publication_point() {
let publication_point = PublicationPointSnapshot {
format_version: PublicationPointSnapshot::FORMAT_VERSION_V1,
manifest_rsync_uri: "rsync://example.test/repo/manifest.mft".to_string(),
publication_point_rsync_uri: "rsync://example.test/repo/".to_string(),
manifest_number_be: vec![0x01],
this_update: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()),
next_update: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()),
verified_at: PackTime::from_utc_offset_datetime(OffsetDateTime::now_utc()),
manifest_bytes: vec![0x01, 0x02, 0x03],
files: vec![],
};
let policy = Policy::default();
let output = process_publication_point_for_issuer_with_options(
&publication_point,
&policy,
&[],
None,
None,
None,
OffsetDateTime::now_utc(),
None,
false,
);
assert!(output.stats.publication_point_dropped);
assert!(output.vrps.is_empty());
assert!(
output
.warnings
.iter()
.any(|warning| warning.message.contains("manifest decode failed")),
"{:?}",
output.warnings
);
}
}