20260507_2 增加CCR/CIR差异定位链路

This commit is contained in:
yuyr 2026-05-09 10:02:47 +08:00
parent 752e746b97
commit c6b408c0f9
12 changed files with 2810 additions and 247 deletions

View File

@ -12,20 +12,61 @@ Usage:
[--rpki-client-bin <path>] \
[--libtls-path <path>] \
[--rp-run-mode <serial|parallel>] \
[--copy-rpki-client-cache] \
[--probe-rpki-client-cache] \
[--ours-extra-args '<args>'] \
[--dry-run]
EOF
}
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# Return the first candidate path that is an executable file; if none of the
# candidates qualify, fall back to the first argument.
first_existing_executable() {
  local default_path="$1"
  shift
  local path
  for path in "$@"; do
    [[ -x "$path" ]] && { printf '%s' "$path"; return; }
  done
  printf '%s' "$default_path"
}
# Return the first candidate path that exists as a regular file; if none do,
# fall back to the first argument.
first_existing_file() {
  local default_path="$1"
  shift
  local path
  for path in "$@"; do
    [[ -f "$path" ]] && { printf '%s' "$path"; return; }
  done
  printf '%s' "$default_path"
}
RUN_ROOT=""
REMOTE_ROOT=""
SSH_TARGET="${SSH_TARGET:-root@47.251.56.108}"
RPKI_CLIENT_BIN="${RPKI_CLIENT_BIN:-/home/yuyr/dev/rpki-client-9.7/build-m5/src/rpki-client}"
LIBTLS_PATH="${LIBTLS_PATH:-/home/yuyr/dev/rpki-client-9.7/.deps/libtls/root/usr/lib/x86_64-linux-gnu/libtls.so.28.0.0}"
RPKI_CLIENT_BIN="${RPKI_CLIENT_BIN:-$(first_existing_executable \
"/home/yuyr/dev/rpki-client-9.7/build-m5/src/rpki-client" \
"$ROOT_DIR/../../.cache/rpki-client-9.7-build/bin/rpki-client" \
"$ROOT_DIR/../../.cache/rpki-client-9.7-build/src/rpki-client-9.7/src/rpki-client" \
"$ROOT_DIR/../../.cache/rpki-client-remote9.0/rpki-client" \
"/home/yuyr/dev/rpki-client-9.7/build-m5/src/rpki-client")}"
LIBTLS_PATH="${LIBTLS_PATH:-$(first_existing_file \
"/home/yuyr/dev/rpki-client-9.7/.deps/libtls/root/usr/lib/x86_64-linux-gnu/libtls.so.28.0.0" \
"$ROOT_DIR/../../.cache/rpki-client-9.7-build/runlib/libtls.so.28" \
"$ROOT_DIR/../../.cache/rpki-client-9.7-build/sysroot/usr/lib/x86_64-linux-gnu/libtls.so.28.0.0" \
"$ROOT_DIR/../../.cache/rpki-client-remote9.0/libtls.so.28" \
"/home/yuyr/dev/rpki-client-9.7/.deps/libtls/root/usr/lib/x86_64-linux-gnu/libtls.so.28.0.0")}"
RP_RUN_MODE="${RP_RUN_MODE:-serial}"
RIR_SET="${RIR_SET:-mixed2}"
OURS_EXTRA_ARGS="${OURS_EXTRA_ARGS:-}"
COPY_RPKI_CLIENT_CACHE="${COPY_RPKI_CLIENT_CACHE:-0}"
PROBE_RPKI_CLIENT_CACHE="${PROBE_RPKI_CLIENT_CACHE:-0}"
DRY_RUN=0
while [[ $# -gt 0 ]]; do
@ -37,6 +78,8 @@ while [[ $# -gt 0 ]]; do
--rpki-client-bin) RPKI_CLIENT_BIN="$2"; shift 2 ;;
--libtls-path) LIBTLS_PATH="$2"; shift 2 ;;
--rp-run-mode) RP_RUN_MODE="$2"; shift 2 ;;
--copy-rpki-client-cache) COPY_RPKI_CLIENT_CACHE=1; shift ;;
--probe-rpki-client-cache) PROBE_RPKI_CLIENT_CACHE=1; shift ;;
--ours-extra-args) OURS_EXTRA_ARGS="$2"; shift 2 ;;
--dry-run) DRY_RUN=1; shift ;;
-h|--help) usage; exit 0 ;;
@ -122,11 +165,14 @@ trap cleanup_remote EXIT
(
cd "$ROOT_DIR"
cargo build --release --bin rpki --bin ccr_to_compare_views
cargo build --release --bin rpki --bin ccr_to_compare_views --bin ccr_state_compare --bin cir_state_compare --bin cir_probe_rpki_client_cache
)
ssh "$SSH_TARGET" "set -e; id -u _rpki-client >/dev/null 2>&1 || useradd -r -M -s /usr/sbin/nologin _rpki-client || true; rm -rf '$REMOTE_ROOT'; mkdir -p '$REMOTE_ROOT/bin' '$REMOTE_ROOT/lib' '$REMOTE_ROOT/state/ours' '$REMOTE_ROOT/state/rpki-client' '$REMOTE_ROOT/steps/step-001/ours' '$REMOTE_ROOT/steps/step-001/rpki-client' '$REMOTE_ROOT/steps/step-002/ours' '$REMOTE_ROOT/steps/step-002/rpki-client'"
ssh "$SSH_TARGET" "set -e; systemctl disable --now rpki-client.timer >/dev/null 2>&1 || true; systemctl stop rpki-client.service >/dev/null 2>&1 || true; pkill -f '[/]rpki-client([[:space:]]|$)' >/dev/null 2>&1 || true; pkill -f '[/]routinator([[:space:]]|$)' >/dev/null 2>&1 || true; id -u _rpki-client >/dev/null 2>&1 || useradd -r -M -s /usr/sbin/nologin _rpki-client || true; rm -rf '$REMOTE_ROOT'; mkdir -p '$REMOTE_ROOT/bin' '$REMOTE_ROOT/lib' '$REMOTE_ROOT/state/ours' '$REMOTE_ROOT/state/rpki-client' '$REMOTE_ROOT/steps/step-001/ours' '$REMOTE_ROOT/steps/step-001/rpki-client' '$REMOTE_ROOT/steps/step-002/ours' '$REMOTE_ROOT/steps/step-002/rpki-client'"
scp "$ROOT_DIR/target/release/rpki" "${COPY_FILES[@]}" "$SSH_TARGET:$REMOTE_ROOT/"
if [[ "$PROBE_RPKI_CLIENT_CACHE" == "1" ]]; then
scp "$ROOT_DIR/target/release/cir_probe_rpki_client_cache" "$SSH_TARGET:$REMOTE_ROOT/bin/"
fi
scp "$RPKI_CLIENT_BIN" "$SSH_TARGET:$REMOTE_ROOT/bin/rpki-client"
scp "$LIBTLS_PATH" "$SSH_TARGET:$REMOTE_ROOT/lib/libtls.so.28"
printf '%s' "$OURS_EXTRA_ARGS" | ssh "$SSH_TARGET" "cat > '$REMOTE_ROOT/ours-extra-args.txt'"
@ -146,6 +192,8 @@ KIND="$3"
cd "$REMOTE_ROOT"
mkdir -p "steps/$STEP_ID/ours" "steps/$STEP_ID/rpki-client"
touch rpki-client-skiplist
chmod 0644 rpki-client-skiplist
OURS_EXTRA_ARGS="$(cat ours-extra-args.txt)"
RP_RUN_MODE="$(cat rp-run-mode.txt)"
RIR_SET="$(cat rir-set.txt)"
@ -172,6 +220,17 @@ tal_file_for_rir() {
esac
}
# Map an RIR name to the HTTPS URI of its trust-anchor certificate.
# NOTE(review): URIs are assumed to match the shipped TAL files — confirm
# against the TALs referenced by tal_file_for_rir.
# Unknown RIR names abort the run with exit status 2.
tal_uri_for_rir() {
case "$1" in
afrinic) printf '%s' "https://rpki.afrinic.net/repository/AfriNIC.cer" ;;
apnic) printf '%s' "https://rpki.apnic.net/repository/apnic-rpki-root-iana-origin.cer" ;;
arin) printf '%s' "https://rrdp.arin.net/arin-rpki-ta.cer" ;;
lacnic) printf '%s' "https://rrdp.lacnic.net/ta/rta-lacnic-rpki.cer" ;;
ripe) printf '%s' "https://rpki.ripe.net/ta/ripe-ncc-ta.cer" ;;
*) echo "unknown rir: $1" >&2; exit 2 ;;
esac
}
ta_file_for_rir() {
case "$1" in
afrinic) printf '%s' "afrinic-ta.cer" ;;
@ -185,10 +244,13 @@ ta_file_for_rir() {
OURS_TAL_ARGS=()
CLIENT_TAL_ARGS=()
OURS_CIR_TAL_ARGS=()
for rir in "${RIRS[@]}"; do
tal_file="$(tal_file_for_rir "$rir")"
ta_file="$(ta_file_for_rir "$rir")"
tal_uri="$(tal_uri_for_rir "$rir")"
OURS_TAL_ARGS+=(--tal-path "$tal_file" --ta-path "$ta_file")
OURS_CIR_TAL_ARGS+=(--cir-tal-uri "$tal_uri")
CLIENT_TAL_ARGS+=(-t "../../$tal_file")
done
@ -226,6 +288,9 @@ PY
"${OURS_TAL_ARGS[@]}" \
"${OURS_EXTRA_ARGV[@]}" \
--ccr-out "steps/$STEP_ID/ours/result.ccr" \
--cir-enable \
--cir-out "steps/$STEP_ID/ours/result.cir" \
"${OURS_CIR_TAL_ARGS[@]}" \
--report-json "steps/$STEP_ID/ours/report.json" \
> "steps/$STEP_ID/ours/run.log" 2>&1
exit_code=$?
@ -268,12 +333,14 @@ PY
set +e
LD_LIBRARY_PATH="$REMOTE_ROOT/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" "$REMOTE_ROOT/bin/rpki-client" \
-vv \
-S "$REMOTE_ROOT/rpki-client-skiplist" \
"${CLIENT_TAL_ARGS[@]}" \
-d cache out \
> "$REMOTE_ROOT/steps/$STEP_ID/rpki-client/run.log" 2>&1
exit_code=$?
set -e
cp out/rpki.ccr "$REMOTE_ROOT/steps/$STEP_ID/rpki-client/result.ccr" 2>/dev/null || true
cp out/rpki.cir "$REMOTE_ROOT/steps/$STEP_ID/rpki-client/result.cir" 2>/dev/null || true
cp out/openbgpd "$REMOTE_ROOT/steps/$STEP_ID/rpki-client/openbgpd" 2>/dev/null || true
finished_ms="$(python3 - <<'PY'
import time
@ -309,26 +376,54 @@ else
fi
EOS
for rel in result.ccr round-result.json run.log stage-timing.json; do
for rel in result.ccr result.cir round-result.json run.log stage-timing.json; do
scp -C "$SSH_TARGET:$REMOTE_ROOT/steps/$step_id/ours/$rel" "$local_step/ours/"
done
for rel in result.ccr round-result.json run.log openbgpd; do
for rel in result.ccr result.cir round-result.json run.log openbgpd; do
scp -C "$SSH_TARGET:$REMOTE_ROOT/steps/$step_id/rpki-client/$rel" "$local_step/rpki-client/" || true
done
if [[ "$COPY_RPKI_CLIENT_CACHE" == "1" ]]; then
mkdir -p "$local_step/rpki-client/cache"
rsync -a --delete "$SSH_TARGET:$REMOTE_ROOT/state/rpki-client/cache/" "$local_step/rpki-client/cache/"
fi
if [[ -f "$local_step/ours/result.cir" && -f "$local_step/rpki-client/result.cir" ]]; then
"$ROOT_DIR/scripts/periodic/compare_ccr_cir_round.sh" \
--ours-ccr "$local_step/ours/result.ccr" \
--rpki-client-ccr "$local_step/rpki-client/result.ccr" \
--ours-cir "$local_step/ours/result.cir" \
--rpki-client-cir "$local_step/rpki-client/result.cir" \
--out-dir "$local_step/compare" \
--trust-anchor unknown >/dev/null
if [[ "$PROBE_RPKI_CLIENT_CACHE" == "1" ]]; then
ssh "$SSH_TARGET" "set -e; mkdir -p '$REMOTE_ROOT/steps/$step_id/compare/cir'; '$REMOTE_ROOT/bin/cir_probe_rpki_client_cache' --ours-cir '$REMOTE_ROOT/steps/$step_id/ours/result.cir' --rpki-client-cir '$REMOTE_ROOT/steps/$step_id/rpki-client/result.cir' --cache-root '$REMOTE_ROOT/state/rpki-client/cache' --rpki-client-log '$REMOTE_ROOT/steps/$step_id/rpki-client/run.log' --out-json '$REMOTE_ROOT/steps/$step_id/compare/cir/rpki-client-cache-probe.json' --sample-limit 50 >/dev/null"
scp -C "$SSH_TARGET:$REMOTE_ROOT/steps/$step_id/compare/cir/rpki-client-cache-probe.json" "$local_step/compare/cir/"
fi
if [[ "$COPY_RPKI_CLIENT_CACHE" == "1" ]]; then
"$ROOT_DIR/target/release/cir_probe_rpki_client_cache" \
--ours-cir "$local_step/ours/result.cir" \
--rpki-client-cir "$local_step/rpki-client/result.cir" \
--cache-root "$local_step/rpki-client/cache" \
--rpki-client-log "$local_step/rpki-client/run.log" \
--out-json "$local_step/compare/cir/rpki-client-cache-probe.json" \
--sample-limit 50 >/dev/null
fi
else
"$ROOT_DIR/scripts/periodic/compare_ccr_round.sh" \
--ours-ccr "$local_step/ours/result.ccr" \
--rpki-client-ccr "$local_step/rpki-client/result.ccr" \
--out-dir "$local_step/compare" \
--trust-anchor unknown >/dev/null
fi
python3 - <<'PY' "$local_step/ours/round-result.json" "$local_step/rpki-client/round-result.json" "$local_step/ours/stage-timing.json" "$local_step/compare/compare-summary.json" "$local_step/step-summary.json" "$OURS_EXTRA_ARGS"
python3 - <<'PY' "$local_step/ours/round-result.json" "$local_step/rpki-client/round-result.json" "$local_step/ours/stage-timing.json" "$local_step/compare/summary.json" "$local_step/compare/compare-summary.json" "$local_step/step-summary.json" "$OURS_EXTRA_ARGS"
import json, sys
ours = json.load(open(sys.argv[1]))
client = json.load(open(sys.argv[2]))
stage = json.load(open(sys.argv[3]))
compare = json.load(open(sys.argv[4]))
ours_extra_args = sys.argv[6]
compare_path = sys.argv[4] if __import__('pathlib').Path(sys.argv[4]).exists() else sys.argv[5]
compare = json.load(open(compare_path))
ours_extra_args = sys.argv[7]
json.dump(
{
"stepId": ours["stepId"],
@ -355,7 +450,7 @@ json.dump(
"onlyInOurs": len(compare["vrps"]["onlyInOurs"]),
"onlyInRpkiClient": len(compare["vrps"]["onlyInRpkiClient"]),
},
open(sys.argv[5], "w"),
open(sys.argv[6], "w"),
indent=2,
)
PY

View File

@ -0,0 +1,188 @@
#!/usr/bin/env bash
set -euo pipefail
# Print the CLI usage text for this CCR/CIR comparison wrapper.
usage() {
  cat <<'EOF'
Usage:
./scripts/periodic/compare_ccr_cir_round.sh \
--ours-ccr <path> \
--rpki-client-ccr <path> \
--out-dir <path> \
[--ours-cir <path>] \
[--rpki-client-cir <path>] \
[--trust-anchor <name>] \
[--sample-limit <n>] \
[--always-compare-cir]
EOF
}
# Repository root, resolved relative to this script's location.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
OURS_CCR=""
CLIENT_CCR=""
OURS_CIR=""
CLIENT_CIR=""
OUT_DIR=""
TRUST_ANCHOR="unknown"
SAMPLE_LIMIT="20"
ALWAYS_COMPARE_CIR=0
# Parse command-line flags (see usage()).
while [[ $# -gt 0 ]]; do
case "$1" in
--ours-ccr) OURS_CCR="$2"; shift 2 ;;
--rpki-client-ccr) CLIENT_CCR="$2"; shift 2 ;;
--ours-cir) OURS_CIR="$2"; shift 2 ;;
--rpki-client-cir) CLIENT_CIR="$2"; shift 2 ;;
--out-dir) OUT_DIR="$2"; shift 2 ;;
--trust-anchor) TRUST_ANCHOR="$2"; shift 2 ;;
--sample-limit) SAMPLE_LIMIT="$2"; shift 2 ;;
--always-compare-cir) ALWAYS_COMPARE_CIR=1; shift ;;
-h|--help) usage; exit 0 ;;
*) echo "unknown argument: $1" >&2; usage; exit 2 ;;
esac
done
# CCR paths and out-dir are mandatory; CIR paths are optional but must come
# as a pair so both sides can be compared.
[[ -n "$OURS_CCR" && -n "$CLIENT_CCR" && -n "$OUT_DIR" ]] || { usage >&2; exit 2; }
if [[ -n "$OURS_CIR" || -n "$CLIENT_CIR" ]]; then
[[ -n "$OURS_CIR" && -n "$CLIENT_CIR" ]] || { echo "--ours-cir and --rpki-client-cir must be provided together" >&2; exit 2; }
fi
# Stage 1: always run the CCR-level comparison into $OUT_DIR/ccr.
mkdir -p "$OUT_DIR/ccr"
"$ROOT_DIR/scripts/periodic/compare_ccr_round.sh" \
--ours-ccr "$OURS_CCR" \
--rpki-client-ccr "$CLIENT_CCR" \
--out-dir "$OUT_DIR/ccr" \
--trust-anchor "$TRUST_ANCHOR" >/dev/null
# Decide whether the CIR comparison should run: only when CIR inputs exist
# and either --always-compare-cir was given or the CCR comparison mismatched.
RUN_CIR="$(python3 - <<'PY' "$OUT_DIR/ccr/compare-summary.json" "$ALWAYS_COMPARE_CIR" "$OURS_CIR" "$CLIENT_CIR"
import json
import sys
summary = json.load(open(sys.argv[1], "r", encoding="utf-8"))
always = sys.argv[2] == "1"
has_cir = bool(sys.argv[3]) and bool(sys.argv[4])
ccr_match = summary.get("stateDigestMatch") is True or summary.get("allMatch") is True
print("1" if has_cir and (always or not ccr_match) else "0")
PY
)"
# Stage 2 (conditional): row-level CIR comparison into $OUT_DIR/cir.
if [[ "$RUN_CIR" == "1" ]]; then
mkdir -p "$OUT_DIR/cir"
"$ROOT_DIR/scripts/periodic/compare_cir_round.sh" \
--ours-cir "$OURS_CIR" \
--rpki-client-cir "$CLIENT_CIR" \
--out-dir "$OUT_DIR/cir" \
--sample-limit "$SAMPLE_LIMIT" >/dev/null
fi
# Stage 3: merge both stage summaries into a combined JSON + Markdown report
# with a coarse diagnosis label.
python3 - <<'PY' \
"$OUT_DIR/ccr/compare-summary.json" \
"$OUT_DIR/cir/cir-compare-summary.json" \
"$OUT_DIR/summary.json" \
"$OUT_DIR/summary.md" \
"$ALWAYS_COMPARE_CIR" \
"$OURS_CIR" \
"$CLIENT_CIR" \
"$TRUST_ANCHOR" \
"$SAMPLE_LIMIT"
import json
import sys
from pathlib import Path

ccr_path = Path(sys.argv[1])
cir_path = Path(sys.argv[2])
summary_json = Path(sys.argv[3])
summary_md = Path(sys.argv[4])
always_compare_cir = sys.argv[5] == "1"
ours_cir = sys.argv[6]
client_cir = sys.argv[7]
trust_anchor = sys.argv[8]
sample_limit = int(sys.argv[9])

ccr = json.load(open(ccr_path, "r", encoding="utf-8"))
ccr_match = ccr.get("stateDigestMatch") is True or ccr.get("allMatch") is True
cir_available = bool(ours_cir) and bool(client_cir)
# The CIR summary file only exists when stage 2 actually ran.
cir_compared = cir_path.exists()
cir = json.load(open(cir_path, "r", encoding="utf-8")) if cir_compared else None

# Derive a single diagnosis label by checking the most input-side difference
# first (TALs), then accepted objects, then reject policy, and falling back
# to a projection/encoding difference.
if ccr_match:
    if cir_compared and cir and cir.get("allMatch") is not True:
        diagnosis = "ccr_state_same_but_cir_process_diff"
    else:
        diagnosis = "ccr_state_digest_match"
else:
    if not cir_available:
        diagnosis = "ccr_mismatch_cir_not_available"
    elif not cir_compared:
        diagnosis = "ccr_mismatch_cir_compare_skipped"
    elif cir.get("tals", {}).get("match") is not True:
        diagnosis = "tal_input_difference"
    elif cir.get("objects", {}).get("match") is not True:
        diagnosis = "input_object_or_manifest_accepted_set_difference"
    elif (
        cir.get("rejects", {}).get("match") is not True
        or cir.get("rejectListSha256Match") is not True
    ):
        diagnosis = "validation_reject_policy_difference"
    else:
        diagnosis = "ccr_projection_sorting_encoding_or_non_cir_state_difference"

combined = {
    "allMatch": bool(ccr_match and (not cir_compared or (cir and cir.get("allMatch") is True))),
    "diagnosis": diagnosis,
    "comparePath": ccr.get("comparePath"),
    "stateDigestMatch": ccr_match,
    "mismatchedStates": ccr.get("mismatchedStates", []),
    "mismatchedComponents": ccr.get("mismatchedComponents", []),
    "vrps": ccr.get("vrps"),
    "vaps": ccr.get("vaps"),
    "trustAnchor": trust_anchor,
    "sampleLimit": sample_limit,
    "ccr": {
        "summaryPath": str(ccr_path),
        "stateDigestMatch": ccr_match,
        "comparePath": ccr.get("comparePath"),
        "mismatchedStates": ccr.get("mismatchedStates", []),
        "mismatchedComponents": ccr.get("mismatchedComponents", []),
        "vrpMatch": ccr.get("vrps", {}).get("match"),
        "vapMatch": ccr.get("vaps", {}).get("match"),
    },
    "cir": {
        "available": cir_available,
        "compared": cir_compared,
        "summaryPath": str(cir_path) if cir_compared else None,
        "comparePolicy": "always" if always_compare_cir else "on_ccr_mismatch",
        "allMatch": cir.get("allMatch") if cir else None,
        "objectsMatch": cir.get("objects", {}).get("match") if cir else None,
        "rejectsMatch": cir.get("rejects", {}).get("match") if cir else None,
        "talsMatch": cir.get("tals", {}).get("match") if cir else None,
        "rejectListSha256Match": cir.get("rejectListSha256Match") if cir else None,
        "oursObjectCount": cir.get("ours", {}).get("objectCount") if cir else None,
        "rpkiClientObjectCount": cir.get("rpkiClient", {}).get("objectCount") if cir else None,
        "oursRejectCount": cir.get("ours", {}).get("rejectCount") if cir else None,
        "rpkiClientRejectCount": cir.get("rpkiClient", {}).get("rejectCount") if cir else None,
    },
}
summary_json.write_text(json.dumps(combined, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")

# Human-readable Markdown companion to summary.json.
lines = [
    "# CCR/CIR Compare Summary",
    "",
    f"- `allMatch`: `{str(combined['allMatch']).lower()}`",
    f"- `diagnosis`: `{combined['diagnosis']}`",
    f"- `ccrStateDigestMatch`: `{str(ccr_match).lower()}`",
    f"- `ccrMismatchedStates`: `{','.join(combined['ccr']['mismatchedStates'])}`",
    f"- `cirCompared`: `{str(cir_compared).lower()}`",
]
if cir:
    lines.extend([
        f"- `cirObjectsMatch`: `{str(combined['cir']['objectsMatch']).lower()}`",
        f"- `cirRejectsMatch`: `{str(combined['cir']['rejectsMatch']).lower()}`",
        f"- `cirTalsMatch`: `{str(combined['cir']['talsMatch']).lower()}`",
        f"- `cirCounts`: ours objects `{combined['cir']['oursObjectCount']}`, rpki-client objects `{combined['cir']['rpkiClientObjectCount']}`, ours rejects `{combined['cir']['oursRejectCount']}`, rpki-client rejects `{combined['cir']['rpkiClientRejectCount']}`",
    ])
else:
    lines.append(f"- `cirSkippedReason`: `{'ccr matched' if ccr_match and not always_compare_cir else 'CIR inputs unavailable'}`")
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
print(summary_json)
PY

View File

@ -18,6 +18,7 @@ CLIENT_CCR=""
OUT_DIR=""
TRUST_ANCHOR="unknown"
CCR_TO_COMPARE_VIEWS_BIN="$ROOT_DIR/target/release/ccr_to_compare_views"
CCR_STATE_COMPARE_BIN="$ROOT_DIR/target/release/ccr_state_compare"
while [[ $# -gt 0 ]]; do
case "$1" in
@ -34,10 +35,10 @@ done
mkdir -p "$OUT_DIR"
if [[ ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
if [[ ! -x "$CCR_STATE_COMPARE_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
(
cd "$ROOT_DIR"
cargo build --release --bin ccr_to_compare_views
cargo build --release --bin ccr_state_compare --bin ccr_to_compare_views
)
fi
@ -48,76 +49,13 @@ CLIENT_VAPS="$OUT_DIR/rpki-client-vaps.csv"
SUMMARY_JSON="$OUT_DIR/compare-summary.json"
SUMMARY_MD="$OUT_DIR/compare-summary.md"
"$CCR_TO_COMPARE_VIEWS_BIN" \
--ccr "$OURS_CCR" \
--vrps-out "$OURS_VRPS" \
--vaps-out "$OURS_VAPS" \
--trust-anchor "$TRUST_ANCHOR"
"$CCR_TO_COMPARE_VIEWS_BIN" \
--ccr "$CLIENT_CCR" \
--vrps-out "$CLIENT_VRPS" \
--vaps-out "$CLIENT_VAPS" \
--trust-anchor "$TRUST_ANCHOR"
python3 - <<'PY' "$OURS_VRPS" "$CLIENT_VRPS" "$OURS_VAPS" "$CLIENT_VAPS" "$SUMMARY_JSON" "$SUMMARY_MD"
import csv
import json
import sys
from pathlib import Path
ours_vrps_path, client_vrps_path, ours_vaps_path, client_vaps_path, json_out, md_out = sys.argv[1:]
def rows(path):
with open(path, newline="") as f:
return list(csv.reader(f))[1:]
ours_vrps = {tuple(r) for r in rows(ours_vrps_path)}
client_vrps = {tuple(r) for r in rows(client_vrps_path)}
ours_vaps = {tuple(r) for r in rows(ours_vaps_path)}
client_vaps = {tuple(r) for r in rows(client_vaps_path)}
summary = {
"vrps": {
"ours": len(ours_vrps),
"rpkiClient": len(client_vrps),
"match": ours_vrps == client_vrps,
"onlyInOurs": sorted(ours_vrps - client_vrps)[:20],
"onlyInRpkiClient": sorted(client_vrps - ours_vrps)[:20],
},
"vaps": {
"ours": len(ours_vaps),
"rpkiClient": len(client_vaps),
"match": ours_vaps == client_vaps,
"onlyInOurs": sorted(ours_vaps - client_vaps)[:20],
"onlyInRpkiClient": sorted(client_vaps - ours_vaps)[:20],
},
}
summary["allMatch"] = summary["vrps"]["match"] and summary["vaps"]["match"]
Path(json_out).write_text(json.dumps(summary, indent=2), encoding="utf-8")
lines = [
"# Round Compare Summary",
"",
f"- `allMatch`: `{summary['allMatch']}`",
f"- `vrpMatch`: `{summary['vrps']['match']}`",
f"- `vapMatch`: `{summary['vaps']['match']}`",
f"- `ours_vrps`: `{summary['vrps']['ours']}`",
f"- `rpki_client_vrps`: `{summary['vrps']['rpkiClient']}`",
f"- `ours_vaps`: `{summary['vaps']['ours']}`",
f"- `rpki_client_vaps`: `{summary['vaps']['rpkiClient']}`",
"",
"## Sample Differences",
"",
f"- `vrps.onlyInOurs`: `{len(summary['vrps']['onlyInOurs'])}`",
f"- `vrps.onlyInRpkiClient`: `{len(summary['vrps']['onlyInRpkiClient'])}`",
f"- `vaps.onlyInOurs`: `{len(summary['vaps']['onlyInOurs'])}`",
f"- `vaps.onlyInRpkiClient`: `{len(summary['vaps']['onlyInRpkiClient'])}`",
]
Path(md_out).write_text("\n".join(lines) + "\n", encoding="utf-8")
PY
"$CCR_STATE_COMPARE_BIN" \
--ours-ccr "$OURS_CCR" \
--rpki-client-ccr "$CLIENT_CCR" \
--out-json "$SUMMARY_JSON" \
--out-md "$SUMMARY_MD" \
--out-dir "$OUT_DIR" \
--trust-anchor "$TRUST_ANCHOR" \
--fallback-compare-views
echo "$OUT_DIR"

View File

@ -0,0 +1,47 @@
#!/usr/bin/env bash
set -euo pipefail
# Print the CLI usage text for this CIR comparison wrapper.
usage() {
  cat <<'EOF'
Usage:
./scripts/periodic/compare_cir_round.sh \
--ours-cir <path> \
--rpki-client-cir <path> \
--out-dir <path> \
[--sample-limit <n>]
EOF
}
# Resolve the repository root relative to this script's location and locate
# the comparison binary built by cargo.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
CIR_STATE_COMPARE_BIN="$ROOT_DIR/target/release/cir_state_compare"
OURS_CIR=""
CLIENT_CIR=""
OUT_DIR=""
SAMPLE_LIMIT="20"
# Parse CLI flags (see usage()).
while [[ $# -gt 0 ]]; do
  case "$1" in
    --ours-cir)
      OURS_CIR="$2"
      shift 2
      ;;
    --rpki-client-cir)
      CLIENT_CIR="$2"
      shift 2
      ;;
    --out-dir)
      OUT_DIR="$2"
      shift 2
      ;;
    --sample-limit)
      SAMPLE_LIMIT="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "unknown argument: $1" >&2
      usage
      exit 2
      ;;
  esac
done
# All three paths are required.
[[ -n "$OURS_CIR" && -n "$CLIENT_CIR" && -n "$OUT_DIR" ]] || { usage >&2; exit 2; }
mkdir -p "$OUT_DIR"
# Build the comparison binary on demand when it is missing.
if [[ ! -x "$CIR_STATE_COMPARE_BIN" ]]; then
  (cd "$ROOT_DIR" && cargo build --release --bin cir_state_compare)
fi
# Run the comparison; summaries land next to each other in $OUT_DIR.
"$CIR_STATE_COMPARE_BIN" \
  --ours-cir "$OURS_CIR" \
  --rpki-client-cir "$CLIENT_CIR" \
  --out-json "$OUT_DIR/cir-compare-summary.json" \
  --out-md "$OUT_DIR/cir-compare-summary.md" \
  --sample-limit "$SAMPLE_LIMIT"
echo "$OUT_DIR"

View File

@ -0,0 +1,520 @@
use std::collections::BTreeSet;
use std::path::{Path, PathBuf};
use rpki::ccr::{
CcrContentInfo, VapCompareRow, VrpCompareRow, compare_state_digests, decode_ccr_compare_views,
decode_content_info, write_vap_csv, write_vrp_csv,
};
use serde_json::json;
/// Parsed command-line options for the `ccr_state_compare` binary.
#[derive(Debug, Default, PartialEq, Eq)]
struct Args {
    // Path to "our" CCR file (required).
    ours_ccr: Option<PathBuf>,
    // Path to the peer (rpki-client) CCR file (required; `--peer-ccr` alias accepted).
    peer_ccr: Option<PathBuf>,
    // Destination for the JSON summary (required).
    out_json: Option<PathBuf>,
    // Optional destination for a Markdown summary.
    out_md: Option<PathBuf>,
    // Optional directory where fallback CSV compare views are written.
    out_dir: Option<PathBuf>,
    // Trust-anchor label attached to fallback compare rows (defaults to "unknown").
    trust_anchor: String,
    // When true, run the row-level set diff if the digest comparison mismatches.
    fallback_compare_views: bool,
}
/// One-line usage string printed on `--help` and argument errors.
fn usage() -> &'static str {
    "Usage: ccr_state_compare --ours-ccr <path> --rpki-client-ccr <path> --out-json <path> [--out-md <path>] [--out-dir <path>] [--trust-anchor <name>] [--fallback-compare-views]"
}
/// Parse `argv` (including the program name at index 0) into [`Args`].
///
/// Returns `Err` with a human-readable message on unknown flags, missing
/// values, or missing required options. `-h`/`--help` is also surfaced as
/// an `Err` carrying the usage text, so the process exits non-zero on help.
fn parse_args(argv: &[String]) -> Result<Args, String> {
    let mut args = Args {
        trust_anchor: "unknown".to_string(),
        ..Args::default()
    };
    // Index 0 is the program name; start at the first real argument.
    let mut i = 1usize;
    while i < argv.len() {
        match argv[i].as_str() {
            "--ours-ccr" => {
                i += 1;
                args.ours_ccr = Some(argv.get(i).ok_or("--ours-ccr requires a value")?.into());
            }
            // `--peer-ccr` is an accepted alias for `--rpki-client-ccr`.
            "--rpki-client-ccr" | "--peer-ccr" => {
                i += 1;
                args.peer_ccr = Some(
                    argv.get(i)
                        .ok_or("--rpki-client-ccr requires a value")?
                        .into(),
                );
            }
            "--out-json" => {
                i += 1;
                args.out_json = Some(argv.get(i).ok_or("--out-json requires a value")?.into());
            }
            "--out-md" => {
                i += 1;
                args.out_md = Some(argv.get(i).ok_or("--out-md requires a value")?.into());
            }
            "--out-dir" => {
                i += 1;
                args.out_dir = Some(argv.get(i).ok_or("--out-dir requires a value")?.into());
            }
            "--trust-anchor" => {
                i += 1;
                args.trust_anchor = argv
                    .get(i)
                    .ok_or("--trust-anchor requires a value")?
                    .clone();
            }
            "--fallback-compare-views" => {
                args.fallback_compare_views = true;
            }
            "-h" | "--help" => return Err(usage().to_string()),
            other => return Err(format!("unknown argument: {other}\n{}", usage())),
        }
        i += 1;
    }
    // Validate required options after the loop so error messages can cite
    // the full usage string.
    if args.ours_ccr.is_none() {
        return Err(format!("--ours-ccr is required\n{}", usage()));
    }
    if args.peer_ccr.is_none() {
        return Err(format!("--rpki-client-ccr is required\n{}", usage()));
    }
    if args.out_json.is_none() {
        return Err(format!("--out-json is required\n{}", usage()));
    }
    Ok(args)
}
/// CLI entry point: parse process arguments, then run the comparison.
fn main() -> Result<(), String> {
    run(parse_args(&std::env::args().collect::<Vec<_>>())?)
}
/// Execute the comparison described by `args`.
///
/// Decodes both CCR files, compares their per-state digests, optionally
/// falls back to a row-level VRP/VAP set diff when the digests mismatch
/// (and `--fallback-compare-views` was given), then writes the JSON summary
/// (and optional Markdown) and prints the JSON path on stdout.
fn run(args: Args) -> Result<(), String> {
    // parse_args guarantees these are present.
    let ours_path = args.ours_ccr.as_ref().unwrap();
    let peer_path = args.peer_ccr.as_ref().unwrap();
    let ours_der = read_file(ours_path)?;
    let peer_der = read_file(peer_path)?;
    // Digest-level comparison of the two CCR payloads.
    let comparison = compare_state_digests(&ours_der, &peer_der).map_err(|e| e.to_string())?;
    let state_digest_match = comparison.matches();
    let mismatched_states = comparison.mismatched_state_names();
    // Components include version/hash-algorithm mismatches plus any state names.
    let mut mismatched_components = Vec::new();
    if comparison.ours.version != comparison.peer.version {
        mismatched_components.push("version".to_string());
    }
    if comparison.ours.hash_alg_oid != comparison.peer.hash_alg_oid {
        mismatched_components.push("hashAlgorithm".to_string());
    }
    mismatched_components.extend(mismatched_states.iter().map(|name| (*name).to_string()));
    // Only pay for the row-level decode when digests differ and the caller asked.
    let run_fallback = args.fallback_compare_views && !state_digest_match;
    let fallback = if run_fallback {
        Some(build_compare_view_fallback(
            &ours_der,
            &peer_der,
            &args.trust_anchor,
            args.out_dir.as_deref(),
        )?)
    } else {
        None
    };
    // Overall verdict is driven by the digest comparison alone; the fallback
    // only refines the comparePath label.
    let all_match = state_digest_match;
    let compare_path = if state_digest_match {
        "ccr_state_digest_match"
    } else if let Some(fallback) = fallback.as_ref() {
        if fallback.vrps.match_ && fallback.vaps.match_ {
            "ccr_state_digest_mismatch_with_compare_views_match"
        } else {
            "ccr_state_digest_mismatch_with_set_diff"
        }
    } else {
        "ccr_state_digest_mismatch"
    };
    let summary = json!({
        "comparePath": compare_path,
        "allMatch": all_match,
        "stateDigestMatch": state_digest_match,
        "mismatchedStates": mismatched_states,
        "mismatchedComponents": mismatched_components,
        "versionMatch": comparison.ours.version == comparison.peer.version,
        "hashAlgorithmMatch": comparison.ours.hash_alg_oid == comparison.peer.hash_alg_oid,
        "ours": {
            "version": comparison.ours.version,
            "hashAlg": comparison.ours.hash_alg_oid,
        },
        "rpkiClient": {
            "version": comparison.peer.version,
            "hashAlg": comparison.peer.hash_alg_oid,
        },
        "states": comparison.states.iter().map(|state| json!({
            "name": state.name,
            "match": state.matches,
            "oursPresent": state.ours_present,
            "rpkiClientPresent": state.peer_present,
            "oursHash": state.ours_hash_hex,
            "rpkiClientHash": state.peer_hash_hex,
        })).collect::<Vec<_>>(),
        // When the fallback did not run, counts are null; "match" is inferred
        // from whether the corresponding state digest mismatched.
        "vrps": fallback.as_ref().map(|summary| summary.vrps.to_json()).unwrap_or_else(|| json!({
            "ours": serde_json::Value::Null,
            "rpkiClient": serde_json::Value::Null,
            "match": state_digest_match || !mismatched_states.contains(&"vrps"),
            "onlyInOurs": [],
            "onlyInRpkiClient": [],
        })),
        "vaps": fallback.as_ref().map(|summary| summary.vaps.to_json()).unwrap_or_else(|| json!({
            "ours": serde_json::Value::Null,
            "rpkiClient": serde_json::Value::Null,
            "match": state_digest_match || !mismatched_states.contains(&"vaps"),
            "onlyInOurs": [],
            "onlyInRpkiClient": [],
        })),
    });
    write_json(args.out_json.as_ref().unwrap(), &summary)?;
    if let Some(md_path) = args.out_md.as_ref() {
        write_markdown(md_path, &summary)?;
    }
    // Emit the summary path so shell wrappers can capture it.
    println!("{}", args.out_json.as_ref().unwrap().display());
    Ok(())
}
/// Read a file fully into memory, mapping any I/O failure to a message that
/// includes the offending path.
fn read_file(path: &Path) -> Result<Vec<u8>, String> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(bytes),
        Err(err) => Err(format!("read file failed: {}: {err}", path.display())),
    }
}
/// Serialize `value` as pretty-printed JSON to `path`, creating any missing
/// parent directories first.
fn write_json(path: &Path, value: &serde_json::Value) -> Result<(), String> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?;
    }
    std::fs::write(
        path,
        serde_json::to_vec_pretty(value).map_err(|e| e.to_string())?,
    )
    .map_err(|e| format!("write json failed: {}: {e}", path.display()))
}
/// Render a short Markdown digest of `summary` (the JSON produced by `run`)
/// to `path`, creating any missing parent directories first.
/// Missing/ill-typed fields degrade to `-` or `false` rather than erroring.
fn write_markdown(path: &Path, summary: &serde_json::Value) -> Result<(), String> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?;
    }
    let lines = vec![
        "# CCR State Compare Summary".to_string(),
        String::new(),
        format!(
            "- `comparePath`: `{}`",
            summary["comparePath"].as_str().unwrap_or("-")
        ),
        format!(
            "- `allMatch`: `{}`",
            summary["allMatch"].as_bool().unwrap_or(false)
        ),
        format!(
            "- `stateDigestMatch`: `{}`",
            summary["stateDigestMatch"].as_bool().unwrap_or(false)
        ),
        // Comma-joined list of state names whose digests differed.
        format!(
            "- `mismatchedStates`: `{}`",
            summary["mismatchedStates"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .filter_map(|item| item.as_str())
                        .collect::<Vec<_>>()
                        .join(",")
                })
                .unwrap_or_default()
        ),
        format!(
            "- `vrpMatch`: `{}`",
            summary["vrps"]["match"].as_bool().unwrap_or(false)
        ),
        format!(
            "- `vapMatch`: `{}`",
            summary["vaps"]["match"].as_bool().unwrap_or(false)
        ),
    ];
    std::fs::write(path, lines.join("\n") + "\n")
        .map_err(|e| format!("write markdown failed: {}: {e}", path.display()))
}
/// Result of the row-level fallback comparison: one set summary per view.
struct FallbackSummary {
    vrps: SetSummary<VrpCompareRow>,
    vaps: SetSummary<VapCompareRow>,
}
/// Set-difference summary between our rows and rpki-client's rows.
struct SetSummary<T> {
    // Total row counts on each side.
    ours: usize,
    rpki_client: usize,
    // True when both sets are exactly equal.
    match_: bool,
    // Sample rows present on only one side (capped by compare_sets).
    only_in_ours: Vec<T>,
    only_in_rpki_client: Vec<T>,
}
// JSON projection of a VRP set summary; sample rows become compact arrays.
impl SetSummary<VrpCompareRow> {
    fn to_json(&self) -> serde_json::Value {
        json!({
            "ours": self.ours,
            "rpkiClient": self.rpki_client,
            "match": self.match_,
            "onlyInOurs": self.only_in_ours.iter().map(vrp_row_json).collect::<Vec<_>>(),
            "onlyInRpkiClient": self.only_in_rpki_client.iter().map(vrp_row_json).collect::<Vec<_>>(),
        })
    }
}
// JSON projection of a VAP set summary, mirroring the VRP shape.
impl SetSummary<VapCompareRow> {
    fn to_json(&self) -> serde_json::Value {
        json!({
            "ours": self.ours,
            "rpkiClient": self.rpki_client,
            "match": self.match_,
            "onlyInOurs": self.only_in_ours.iter().map(vap_row_json).collect::<Vec<_>>(),
            "onlyInRpkiClient": self.only_in_rpki_client.iter().map(vap_row_json).collect::<Vec<_>>(),
        })
    }
}
/// Decode both CCR payloads into VRP/VAP compare rows, optionally persist
/// the four CSV views under `out_dir`, and summarize the set differences.
fn build_compare_view_fallback(
    ours_der: &[u8],
    peer_der: &[u8],
    trust_anchor: &str,
    out_dir: Option<&Path>,
) -> Result<FallbackSummary, String> {
    let ours = decode_content_info(ours_der).map_err(|e| e.to_string())?;
    let peer = decode_content_info(peer_der).map_err(|e| e.to_string())?;
    let (ours_vrps, ours_vaps) = decode_views(&ours, trust_anchor)?;
    let (peer_vrps, peer_vaps) = decode_views(&peer, trust_anchor)?;
    // CSV side-products for manual inspection; only written when requested.
    if let Some(out_dir) = out_dir {
        write_vrp_csv(&out_dir.join("ours-vrps.csv"), &ours_vrps)?;
        write_vap_csv(&out_dir.join("ours-vaps.csv"), &ours_vaps)?;
        write_vrp_csv(&out_dir.join("rpki-client-vrps.csv"), &peer_vrps)?;
        write_vap_csv(&out_dir.join("rpki-client-vaps.csv"), &peer_vaps)?;
    }
    Ok(FallbackSummary {
        vrps: compare_sets(&ours_vrps, &peer_vrps),
        vaps: compare_sets(&ours_vaps, &peer_vaps),
    })
}
/// Thin wrapper over `decode_ccr_compare_views`, kept so callers in this
/// file name the (VRP set, VAP set) tuple in one place.
fn decode_views(
    content_info: &CcrContentInfo,
    trust_anchor: &str,
) -> Result<(BTreeSet<VrpCompareRow>, BTreeSet<VapCompareRow>), String> {
    decode_ccr_compare_views(content_info, trust_anchor)
}
/// Summarize how two ordered row sets relate: counts, exact equality, and
/// up to 20 sample rows unique to each side.
fn compare_sets<T: Ord + Clone>(ours: &BTreeSet<T>, rpki_client: &BTreeSet<T>) -> SetSummary<T> {
    // Sample the asymmetric differences first; capped so huge diffs stay cheap.
    let sample_ours: Vec<T> = ours.difference(rpki_client).take(20).cloned().collect();
    let sample_peer: Vec<T> = rpki_client.difference(ours).take(20).cloned().collect();
    SetSummary {
        ours: ours.len(),
        rpki_client: rpki_client.len(),
        match_: ours == rpki_client,
        only_in_ours: sample_ours,
        only_in_rpki_client: sample_peer,
    }
}
/// Compact JSON array form of a VRP sample row: [asn, prefix, maxLength, TA].
fn vrp_row_json(row: &VrpCompareRow) -> serde_json::Value {
    json!([row.asn, row.ip_prefix, row.max_length, row.trust_anchor])
}
/// Compact JSON array form of a VAP sample row: [customerAsn, providers, TA].
fn vap_row_json(row: &VapCompareRow) -> serde_json::Value {
    json!([row.customer_asn, row.providers, row.trust_anchor])
}
#[cfg(test)]
mod tests {
use super::*;
use rpki::ccr::{
CcrContentInfo, CcrDigestAlgorithm, ManifestState, RpkiCanonicalCacheRepresentation,
build_aspa_payload_state, build_roa_payload_state, encode_content_info,
};
use rpki::data_model::roa::{IpPrefix, RoaAfi};
use rpki::validation::objects::{AspaAttestation, Vrp};
// All required flags plus the fallback switch parse into the expected Args.
#[test]
fn parse_args_accepts_required_flags() {
    let args = parse_args(&[
        "ccr_state_compare".to_string(),
        "--ours-ccr".to_string(),
        "ours.ccr".to_string(),
        "--rpki-client-ccr".to_string(),
        "peer.ccr".to_string(),
        "--out-json".to_string(),
        "summary.json".to_string(),
        "--fallback-compare-views".to_string(),
    ])
    .expect("parse args");
    assert_eq!(args.ours_ccr.as_deref(), Some(Path::new("ours.ccr")));
    assert_eq!(args.peer_ccr.as_deref(), Some(Path::new("peer.ccr")));
    assert_eq!(args.out_json.as_deref(), Some(Path::new("summary.json")));
    assert!(args.fallback_compare_views);
}
// Omitting --rpki-client-ccr must fail with a message naming the flag.
#[test]
fn parse_args_rejects_missing_peer() {
    let err = parse_args(&[
        "ccr_state_compare".to_string(),
        "--ours-ccr".to_string(),
        "ours.ccr".to_string(),
        "--out-json".to_string(),
        "summary.json".to_string(),
    ])
    .unwrap_err();
    assert!(err.contains("--rpki-client-ccr is required"), "{err}");
}
// Identical CCRs: digest comparison matches, so the fallback set diff is
// skipped even though --fallback-compare-views is on (no CSVs, null counts).
#[test]
fn run_reports_digest_match_without_fallback_counts() {
    let temp = tempfile::tempdir().expect("tempdir");
    let ccr = encode_content_info(&sample_content(64496)).expect("encode");
    let ours = temp.path().join("ours.ccr");
    let peer = temp.path().join("peer.ccr");
    let out = temp.path().join("summary.json");
    std::fs::write(&ours, &ccr).expect("write ours");
    std::fs::write(&peer, &ccr).expect("write peer");
    run(Args {
        ours_ccr: Some(ours),
        peer_ccr: Some(peer),
        out_json: Some(out.clone()),
        out_md: None,
        out_dir: Some(temp.path().join("compare")),
        trust_anchor: "apnic".to_string(),
        fallback_compare_views: true,
    })
    .expect("run");
    let summary: serde_json::Value =
        serde_json::from_slice(&std::fs::read(out).expect("read summary")).expect("json");
    assert_eq!(summary["comparePath"], "ccr_state_digest_match");
    assert_eq!(summary["allMatch"], true);
    assert!(summary["vrps"]["ours"].is_null());
    assert!(!temp.path().join("compare/ours-vrps.csv").exists());
}
#[test]
fn run_reports_digest_mismatch_with_vrp_fallback() {
let temp = tempfile::tempdir().expect("tempdir");
let ours_ccr = encode_content_info(&sample_content(64496)).expect("encode ours");
let peer_ccr = encode_content_info(&sample_content(64497)).expect("encode peer");
let ours = temp.path().join("ours.ccr");
let peer = temp.path().join("peer.ccr");
let out = temp.path().join("summary.json");
std::fs::write(&ours, &ours_ccr).expect("write ours");
std::fs::write(&peer, &peer_ccr).expect("write peer");
run(Args {
ours_ccr: Some(ours),
peer_ccr: Some(peer),
out_json: Some(out.clone()),
out_md: None,
out_dir: Some(temp.path().join("compare")),
trust_anchor: "apnic".to_string(),
fallback_compare_views: true,
})
.expect("run");
let summary: serde_json::Value =
serde_json::from_slice(&std::fs::read(out).expect("read summary")).expect("json");
assert_eq!(
summary["comparePath"],
"ccr_state_digest_mismatch_with_set_diff"
);
assert_eq!(summary["allMatch"], false);
assert_eq!(summary["vrps"]["ours"], 1);
assert_eq!(summary["vrps"]["rpkiClient"], 1);
assert_eq!(summary["vrps"]["match"], false);
assert!(temp.path().join("compare/ours-vrps.csv").exists());
}
#[test]
fn run_reports_digest_mismatch_but_compare_views_match() {
let temp = tempfile::tempdir().expect("tempdir");
let ours_ccr = encode_content_info(&sample_content_with_manifest_hash(64496, 0x11))
.expect("encode ours");
let peer_ccr = encode_content_info(&sample_content_with_manifest_hash(64496, 0x22))
.expect("encode peer");
let ours = temp.path().join("ours.ccr");
let peer = temp.path().join("peer.ccr");
let out = temp.path().join("nested/summary.json");
let out_md = temp.path().join("nested/summary.md");
let out_dir = temp.path().join("compare");
std::fs::write(&ours, &ours_ccr).expect("write ours");
std::fs::write(&peer, &peer_ccr).expect("write peer");
run(Args {
ours_ccr: Some(ours),
peer_ccr: Some(peer),
out_json: Some(out.clone()),
out_md: Some(out_md.clone()),
out_dir: Some(out_dir.clone()),
trust_anchor: "apnic".to_string(),
fallback_compare_views: true,
})
.expect("run");
let summary: serde_json::Value =
serde_json::from_slice(&std::fs::read(out).expect("read summary")).expect("json");
assert_eq!(
summary["comparePath"],
"ccr_state_digest_mismatch_with_compare_views_match"
);
assert_eq!(summary["allMatch"], false);
assert_eq!(summary["stateDigestMatch"], false);
assert_eq!(summary["mismatchedStates"], serde_json::json!(["mfts"]));
assert_eq!(summary["vrps"]["ours"], 1);
assert_eq!(summary["vrps"]["rpkiClient"], 1);
assert_eq!(summary["vrps"]["match"], true);
assert_eq!(summary["vaps"]["match"], true);
assert!(out_dir.join("ours-vrps.csv").exists());
assert!(
std::fs::read_to_string(out_md)
.expect("read markdown")
.contains("compare_views_match")
);
}
fn sample_content(asn: u32) -> CcrContentInfo {
sample_content_with_manifest(asn, None)
}
fn sample_content_with_manifest_hash(asn: u32, manifest_hash_fill: u8) -> CcrContentInfo {
sample_content_with_manifest(
asn,
Some(ManifestState {
mis: Vec::new(),
most_recent_update: time::OffsetDateTime::UNIX_EPOCH,
hash: vec![manifest_hash_fill; 32],
}),
)
}
fn sample_content_with_manifest(asn: u32, mfts: Option<ManifestState>) -> CcrContentInfo {
let vrps = build_roa_payload_state(&[Vrp {
asn,
prefix: IpPrefix {
afi: RoaAfi::Ipv4,
prefix_len: 24,
addr: [192, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
},
max_length: 24,
}])
.expect("build vrps");
let vaps = build_aspa_payload_state(&[AspaAttestation {
customer_as_id: asn,
provider_as_ids: vec![64497],
}])
.expect("build vaps");
CcrContentInfo::new(RpkiCanonicalCacheRepresentation {
version: 0,
hash_alg: CcrDigestAlgorithm::Sha256,
produced_at: time::OffsetDateTime::UNIX_EPOCH,
mfts,
vrps: Some(vrps),
vaps: Some(vaps),
tas: None,
rks: None,
})
}
}

View File

@ -56,7 +56,20 @@ fn real_main() -> Result<(), String> {
.map_err(|e| format!("read cir failed: {}: {e}", cir_path.display()))?;
let cir = rpki::cir::decode_cir(&bytes).map_err(|e| format!("decode cir failed: {e}"))?;
println!("reject_list_sha256={}", hex::encode(&cir.reject_list_sha256));
println!("object_count={}", cir.objects.len());
println!("tal_count={}", cir.tals.len());
for (index, item) in cir.objects.iter().take(args.limit).enumerate() {
println!(
"{:04} object={} sha256={}",
index + 1,
item.rsync_uri,
hex::encode(&item.sha256)
);
}
println!(
"reject_list_sha256={}",
hex::encode(&cir.reject_list_sha256)
);
println!("reject_count={}", cir.rejected_objects.len());
for (index, item) in cir.rejected_objects.iter().take(args.limit).enumerate() {
println!(
@ -89,7 +102,10 @@ mod tests {
"5".to_string(),
])
.expect("parse args");
assert_eq!(args.cir_path.as_deref(), Some(std::path::Path::new("input.cir")));
assert_eq!(
args.cir_path.as_deref(),
Some(std::path::Path::new("input.cir"))
);
assert_eq!(args.limit, 5);
}
}

View File

@ -0,0 +1,689 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::path::{Path, PathBuf};
use rpki::cir::decode_cir;
use serde_json::json;
use sha2::{Digest, Sha256};
/// Parsed command-line options for the cache probe tool.
#[derive(Debug, Default, PartialEq, Eq)]
struct Args {
    /// Path to our CIR file (`--ours-cir`, required).
    ours_cir: Option<PathBuf>,
    /// Path to the rpki-client CIR file (`--rpki-client-cir` / `--peer-cir`, required).
    peer_cir: Option<PathBuf>,
    /// rpki-client cache root directory (`--cache-root`, required).
    cache_root: Option<PathBuf>,
    /// Optional rpki-client log scanned for cleanup mentions (`--rpki-client-log`).
    rpki_client_log: Option<PathBuf>,
    /// Where to write the JSON summary (`--out-json`, required).
    out_json: Option<PathBuf>,
    /// Cap on detailed samples and top-host rows (`--sample-limit`, default 20).
    sample_limit: usize,
}
/// One-line usage synopsis shown alongside argument errors.
fn usage() -> &'static str {
    concat!(
        "Usage: cir_probe_rpki_client_cache --ours-cir <path> --rpki-client-cir <path> ",
        "--cache-root <path> --out-json <path> [--rpki-client-log <path>] [--sample-limit <n>]"
    )
}
/// Entry point: delegate to the fallible driver; on failure, print the error
/// to stderr and exit with a non-zero status.
fn main() {
    match real_main() {
        Ok(()) => {}
        Err(message) => {
            eprintln!("{message}");
            std::process::exit(1);
        }
    }
}
/// Collect argv once, parse flags, then execute the probe.
fn real_main() -> Result<(), String> {
    let argv: Vec<String> = std::env::args().collect();
    let args = parse_args(&argv)?;
    run(args)
}
/// Parse command-line flags into `Args`.
///
/// Flags may appear in any order; each one consumes exactly one value.
/// Returns a usage-bearing error for unknown flags, missing values, a
/// non-numeric `--sample-limit`, or any missing required flag.
fn parse_args(argv: &[String]) -> Result<Args, String> {
    // Defaults: only the sample cap is non-zero out of the box.
    let mut args = Args {
        sample_limit: 20,
        ..Args::default()
    };
    // Walk flags left to right, skipping argv[0] (the program name).
    let mut flags = argv.iter().skip(1);
    while let Some(flag) = flags.next() {
        match flag.as_str() {
            "--ours-cir" => {
                let value = flags.next().ok_or("--ours-cir requires a value")?;
                args.ours_cir = Some(PathBuf::from(value));
            }
            "--rpki-client-cir" | "--peer-cir" => {
                let value = flags.next().ok_or("--rpki-client-cir requires a value")?;
                args.peer_cir = Some(PathBuf::from(value));
            }
            "--cache-root" => {
                let value = flags.next().ok_or("--cache-root requires a value")?;
                args.cache_root = Some(PathBuf::from(value));
            }
            "--rpki-client-log" => {
                let value = flags.next().ok_or("--rpki-client-log requires a value")?;
                args.rpki_client_log = Some(PathBuf::from(value));
            }
            "--out-json" => {
                let value = flags.next().ok_or("--out-json requires a value")?;
                args.out_json = Some(PathBuf::from(value));
            }
            "--sample-limit" => {
                let value = flags.next().ok_or("--sample-limit requires a value")?;
                args.sample_limit = value
                    .parse::<usize>()
                    .map_err(|_| format!("invalid --sample-limit: {value}"))?;
            }
            "-h" | "--help" => return Err(usage().to_string()),
            other => return Err(format!("unknown argument: {other}\n{}", usage())),
        }
    }
    // Reject runs that omit any required flag.
    if args.ours_cir.is_none() {
        return Err(format!("--ours-cir is required\n{}", usage()));
    }
    if args.peer_cir.is_none() {
        return Err(format!("--rpki-client-cir is required\n{}", usage()));
    }
    if args.cache_root.is_none() {
        return Err(format!("--cache-root is required\n{}", usage()));
    }
    if args.out_json.is_none() {
        return Err(format!("--out-json is required\n{}", usage()));
    }
    Ok(args)
}
/// Drive the probe: decode both CIRs, compute the objects present only in our
/// CIR, check each of those against the rpki-client on-disk cache (validated
/// tree, rsync staging tree, and RRDP session trees), optionally match them
/// against cleanup lines from the rpki-client log, and write a JSON summary.
fn run(args: Args) -> Result<(), String> {
    // parse_args() already rejected missing flags, so these unwraps are safe.
    let ours_cir_path = args.ours_cir.as_ref().expect("validated");
    let peer_cir_path = args.peer_cir.as_ref().expect("validated");
    let cache_root = args.cache_root.as_ref().expect("validated");
    let out_json = args.out_json.as_ref().expect("validated");
    let ours = decode_cir(&read_file(ours_cir_path)?)
        .map_err(|e| format!("decode ours CIR failed: {e}"))?;
    let peer = decode_cir(&read_file(peer_cir_path)?)
        .map_err(|e| format!("decode rpki-client CIR failed: {e}"))?;
    // URIs known to the rpki-client side, for fast membership tests.
    let peer_objects = peer
        .objects
        .iter()
        .map(|item| item.rsync_uri.as_str())
        .collect::<BTreeSet<_>>();
    // Objects our CIR has but rpki-client's does not; these are what we probe.
    let only_in_ours = ours
        .objects
        .iter()
        .filter(|item| !peer_objects.contains(item.rsync_uri.as_str()))
        .map(|item| ProbeObject {
            uri: item.rsync_uri.clone(),
            sha256_hex: hex::encode(&item.sha256),
        })
        .collect::<Vec<_>>();
    let rrdp_dirs = list_rrdp_dirs(cache_root)?;
    // Log matching is optional; without a log we report zero mentions.
    let log_mentions = match args.rpki_client_log.as_ref() {
        Some(path) => build_log_mentions(path, &only_in_ours)?,
        None => LogMentions::default(),
    };
    // Aggregate counters and per-category URI lists for the summary.
    let mut cache_hash_match_count = 0usize;
    let mut cache_hash_mismatch_count = 0usize;
    let mut cache_missing_count = 0usize;
    let mut valid_path_exists_count = 0usize;
    let mut rrdp_temp_path_exists_count = 0usize;
    let mut rsync_temp_path_exists_count = 0usize;
    let mut log_mention_count = 0usize;
    let mut samples = Vec::new();
    let mut missing_uris = Vec::new();
    let mut matched_uris = Vec::new();
    let mut mismatched_uris = Vec::new();
    let mut log_mentioned_uris = Vec::new();
    for object in &only_in_ours {
        let probe = probe_cache(cache_root, &rrdp_dirs, object)?;
        // Classify: a hash match wins over bare existence, which wins over missing.
        if probe.hash_match {
            cache_hash_match_count += 1;
            matched_uris.push(object.uri.as_str());
        } else if probe.exists {
            cache_hash_mismatch_count += 1;
            mismatched_uris.push(object.uri.as_str());
        } else {
            cache_missing_count += 1;
            missing_uris.push(object.uri.as_str());
        }
        if probe.valid_path_exists {
            valid_path_exists_count += 1;
        }
        if probe.rrdp_temp_path_exists {
            rrdp_temp_path_exists_count += 1;
        }
        if probe.rsync_temp_path_exists {
            rsync_temp_path_exists_count += 1;
        }
        let log_mention = log_mentions.mentioned.contains(object.uri.as_str());
        if log_mention {
            log_mention_count += 1;
            log_mentioned_uris.push(object.uri.as_str());
        }
        // Only the first `sample_limit` objects carry full per-object detail.
        if samples.len() < args.sample_limit {
            samples.push(json!({
                "uri": object.uri,
                "sha256": object.sha256_hex,
                "cacheStatus": probe.status(),
                "cacheLocations": probe.locations,
                "logMentionedAsSuperfluousOrDeleted": log_mention,
            }));
        }
    }
    let summary = json!({
        "onlyInOursTotal": only_in_ours.len(),
        "cacheRoot": cache_root,
        "rpkiClientLog": args.rpki_client_log,
        "rrdpCacheDirs": rrdp_dirs.len(),
        "cacheProbe": {
            "hashMatchCount": cache_hash_match_count,
            "hashMismatchCount": cache_hash_mismatch_count,
            "missingCount": cache_missing_count,
            "validPathExistsCount": valid_path_exists_count,
            "rrdpTempPathExistsCount": rrdp_temp_path_exists_count,
            "rsyncTempPathExistsCount": rsync_temp_path_exists_count,
            "hashMatchByExtension": group_by_extension(matched_uris.iter().copied()),
            "missingByExtension": group_by_extension(missing_uris.iter().copied()),
            "hashMismatchByExtension": group_by_extension(mismatched_uris.iter().copied()),
            "hashMatchByHostTop": top_hosts(matched_uris.iter().copied(), args.sample_limit),
            "missingByHostTop": top_hosts(missing_uris.iter().copied(), args.sample_limit),
            "hashMismatchByHostTop": top_hosts(mismatched_uris.iter().copied(), args.sample_limit),
        },
        "logProbe": {
            "enabled": args.rpki_client_log.is_some(),
            "relevantLineCount": log_mentions.relevant_line_count,
            "mentionedCount": log_mention_count,
            "mentionedByExtension": group_by_extension(log_mentioned_uris.iter().copied()),
            "mentionedByHostTop": top_hosts(log_mentioned_uris.iter().copied(), args.sample_limit),
        },
        "samples": samples,
    });
    write_json(out_json, &summary)?;
    // Print the summary path so callers can locate the artifact.
    println!("{}", out_json.display());
    Ok(())
}
/// An object present only in our CIR: its rsync URI plus the SHA-256 (hex)
/// our CIR recorded for it.
#[derive(Clone, Debug)]
struct ProbeObject {
    /// Full `rsync://` URI of the object.
    uri: String,
    /// Hex-encoded SHA-256 from our CIR, used to check cached copies.
    sha256_hex: String,
}
/// Result of checking one object against the on-disk rpki-client cache.
#[derive(Default, Debug)]
struct CacheProbe {
    /// A file was found at any candidate location.
    exists: bool,
    /// At least one found copy matched the expected SHA-256.
    hash_match: bool,
    /// Found under the validated tree (`<root>/<stripped-uri>`).
    valid_path_exists: bool,
    /// Found under an RRDP session directory (`<root>/.rrdp/<dir>/...`).
    rrdp_temp_path_exists: bool,
    /// Found under the rsync staging tree (`<root>/.rsync/...`).
    rsync_temp_path_exists: bool,
    /// Up to four per-location detail records (kind, path, hash, match flag).
    locations: Vec<serde_json::Value>,
}
impl CacheProbe {
    /// Collapse the probe flags into the status string used in JSON samples:
    /// a hash match wins over bare existence, which wins over missing.
    fn status(&self) -> &'static str {
        match (self.hash_match, self.exists) {
            (true, _) => "hash_match",
            (false, true) => "hash_mismatch",
            (false, false) => "missing",
        }
    }
}
/// Look for `object` in every plausible cache location and report what was
/// found: the validated tree, the rsync staging tree, and each RRDP session
/// directory. Non-rsync URIs cannot be mapped onto the cache and come back as
/// an all-default (missing) probe.
fn probe_cache(
    cache_root: &Path,
    rrdp_dirs: &[PathBuf],
    object: &ProbeObject,
) -> Result<CacheProbe, String> {
    let stripped = match strip_rsync_uri(&object.uri) {
        Some(stripped) => stripped,
        None => return Ok(CacheProbe::default()),
    };
    // Candidate locations, labelled by cache area.
    let mut candidates = Vec::with_capacity(2 + rrdp_dirs.len());
    candidates.push(("valid", cache_root.join(stripped)));
    candidates.push(("rsync_temp", cache_root.join(".rsync").join(stripped)));
    candidates.extend(rrdp_dirs.iter().map(|dir| ("rrdp_temp", dir.join(stripped))));
    let mut probe = CacheProbe::default();
    for (kind, path) in candidates {
        if !path.is_file() {
            continue;
        }
        let sha256_hex = sha256_file_hex(&path)?;
        let hash_matches = sha256_hex == object.sha256_hex;
        probe.exists = true;
        if hash_matches {
            probe.hash_match = true;
        }
        // Remember which cache area held a copy.
        match kind {
            "valid" => probe.valid_path_exists = true,
            "rsync_temp" => probe.rsync_temp_path_exists = true,
            "rrdp_temp" => probe.rrdp_temp_path_exists = true,
            _ => {}
        }
        // Bound the per-object detail to four location records.
        if probe.locations.len() < 4 {
            probe.locations.push(json!({
                "kind": kind,
                "path": path,
                "sha256": sha256_hex,
                "hashMatches": hash_matches,
            }));
        }
    }
    Ok(probe)
}
/// URIs found on cleanup-related rpki-client log lines.
#[derive(Default, Debug)]
struct LogMentions {
    /// Full rsync URIs that matched a relevant log line.
    mentioned: BTreeSet<String>,
    /// Number of log lines that passed `is_relevant_cleanup_line`.
    relevant_line_count: usize,
}
/// Scan an rpki-client log and record which probe-object URIs appear on
/// cleanup-related lines (see `is_relevant_cleanup_line`).
///
/// Matching is two-phase: the line's last '/'-separated token (punctuation
/// trimmed) is looked up in a file-name index, and only then must the full
/// stripped URI path occur in the line. This keeps per-line work cheap while
/// avoiding false positives on same-named files from different repositories.
fn build_log_mentions(path: &Path, objects: &[ProbeObject]) -> Result<LogMentions, String> {
    let content = std::fs::read_to_string(path)
        .map_err(|e| format!("read log failed: {}: {e}", path.display()))?;
    // Index candidate objects by bare file name -> (full URI, stripped path).
    let mut by_file_name: HashMap<String, Vec<(&str, &str)>> = HashMap::new();
    for object in objects {
        // Non-rsync URIs never appear in the cache paths the log prints.
        let Some(stripped) = strip_rsync_uri(&object.uri) else {
            continue;
        };
        by_file_name
            .entry(file_name(stripped).to_string())
            .or_default()
            .push((object.uri.as_str(), stripped));
    }
    let mut mentioned = BTreeSet::new();
    let mut relevant_line_count = 0usize;
    for line in content.lines() {
        if !is_relevant_cleanup_line(line) {
            continue;
        }
        relevant_line_count += 1;
        // Last path component of the line, with trailing log punctuation removed.
        let Some(name) = line.split('/').next_back().map(trim_log_token) else {
            continue;
        };
        let Some(candidates) = by_file_name.get(name) else {
            continue;
        };
        // Confirm each candidate by requiring its full path in the line.
        for (uri, stripped) in candidates {
            if line.contains(stripped) {
                mentioned.insert((*uri).to_string());
            }
        }
    }
    Ok(LogMentions {
        mentioned,
        relevant_line_count,
    })
}
/// True when a log line looks like rpki-client removing or distrusting a
/// cached file, and is therefore worth matching against probe objects.
///
/// The original also tested for "deleted superfluous", which is redundant:
/// any such line already contains both "superfluous" and "deleted ", so that
/// pattern can never be the deciding one and has been dropped.
fn is_relevant_cleanup_line(line: &str) -> bool {
    const MARKERS: [&str; 4] = [
        "superfluous",
        "deleted ",
        "bad message digest",
        "referenced file supposed to be deleted",
    ];
    MARKERS.iter().any(|marker| line.contains(marker))
}
/// Strip log punctuation — colons, commas, and parentheses — from both ends
/// of a token (interior punctuation is preserved).
fn trim_log_token(token: &str) -> &str {
    token.trim_matches(|ch: char| matches!(ch, ':' | ',' | ')' | '('))
}
/// List the RRDP session directories under `<cache_root>/.rrdp`, sorted.
/// A cache without a readable `.rrdp` directory simply yields an empty list.
fn list_rrdp_dirs(cache_root: &Path) -> Result<Vec<PathBuf>, String> {
    let read_dir = match std::fs::read_dir(cache_root.join(".rrdp")) {
        Ok(read_dir) => read_dir,
        Err(_) => return Ok(Vec::new()),
    };
    let mut dirs = Vec::new();
    for entry in read_dir {
        let path = entry
            .map_err(|e| format!("read .rrdp entry failed: {e}"))?
            .path();
        // Only directories can hold per-session cache trees.
        if path.is_dir() {
            dirs.push(path);
        }
    }
    dirs.sort();
    Ok(dirs)
}
/// Read a cache file fully into memory and return its hex SHA-256.
fn sha256_file_hex(path: &Path) -> Result<String, String> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(hex::encode(Sha256::digest(&bytes))),
        Err(err) => Err(format!("read cache file failed: {}: {err}", path.display())),
    }
}
/// Read a file fully, attaching the path to the I/O error text on failure.
fn read_file(path: &Path) -> Result<Vec<u8>, String> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(bytes),
        Err(err) => Err(format!("read file failed: {}: {err}", path.display())),
    }
}
/// Pretty-print `value` to `path`, creating parent directories as needed.
fn write_json(path: &Path, value: &serde_json::Value) -> Result<(), String> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?;
    }
    let bytes = serde_json::to_vec_pretty(value).map_err(|e| e.to_string())?;
    std::fs::write(path, bytes)
        .map_err(|e| format!("write json failed: {}: {e}", path.display()))
}
/// Drop the `rsync://` scheme, returning the host-rooted path; `None` for
/// any other scheme (only rsync URIs map onto the cache layout).
fn strip_rsync_uri(uri: &str) -> Option<&str> {
    const SCHEME: &str = "rsync://";
    if uri.starts_with(SCHEME) {
        Some(&uri[SCHEME.len()..])
    } else {
        None
    }
}
/// Everything after the final slash, or the whole path when there is none.
fn file_name(path: &str) -> &str {
    match path.rsplit_once('/') {
        Some((_, name)) => name,
        None => path,
    }
}
/// Tally URIs per lowercase extension key (or `"<none>"`), sorted by key.
fn group_by_extension<'a>(uris: impl IntoIterator<Item = &'a str>) -> BTreeMap<String, usize> {
    uris.into_iter().fold(BTreeMap::new(), |mut counts, uri| {
        *counts.entry(uri_extension(uri)).or_default() += 1;
        counts
    })
}
/// Count URIs per host and return up to `limit` `{host, count}` rows,
/// ordered by descending count with host name as the tie-breaker.
fn top_hosts<'a>(uris: impl IntoIterator<Item = &'a str>, limit: usize) -> Vec<serde_json::Value> {
    let mut counts: BTreeMap<String, usize> = BTreeMap::new();
    for uri in uris {
        *counts.entry(uri_host(uri)).or_default() += 1;
    }
    let mut rows: Vec<(String, usize)> = counts.into_iter().collect();
    rows.sort_by(|(host_a, count_a), (host_b, count_b)| {
        count_b.cmp(count_a).then_with(|| host_a.cmp(host_b))
    });
    rows.truncate(limit);
    rows.into_iter()
        .map(|(host, count)| json!({ "host": host, "count": count }))
        .collect()
}
/// Extract the host from a URI: drop the scheme (if any), then take the
/// segment before the first slash; empty hosts become `"<unknown>"`.
fn uri_host(uri: &str) -> String {
    let without_scheme = match uri.split_once("://") {
        Some((_, rest)) => rest,
        None => uri,
    };
    let host = without_scheme.split('/').next().unwrap_or("");
    if host.is_empty() {
        "<unknown>".to_string()
    } else {
        host.to_string()
    }
}
/// Lowercased extension of the URI's final path component (e.g. `".roa"`),
/// or `"<none>"` when the file name has no non-empty suffix.
fn uri_extension(uri: &str) -> String {
    // Strip the scheme, then the host segment, leaving the repository path.
    let after_scheme = match uri.split_once("://") {
        Some((_, rest)) => rest,
        None => uri,
    };
    let after_host = match after_scheme.split_once('/') {
        Some((_, path)) => path,
        None => after_scheme,
    };
    let file = after_host.rsplit('/').next().unwrap_or(after_host);
    match file.rsplit_once('.') {
        Some((_, ext)) if !ext.is_empty() => format!(".{}", ext.to_ascii_lowercase()),
        _ => "<none>".to_string(),
    }
}
// Unit tests: flag parsing, a full probe run over a synthetic cache layout,
// decode-failure labeling, the URI helpers, and log-mention matching.
#[cfg(test)]
mod tests {
    use super::*;
    use rpki::cir::{
        CIR_VERSION_V2, CanonicalInputRepresentation, CirHashAlgorithm, CirObject,
        CirRejectedObject, CirTal, compute_reject_list_sha256, encode_cir,
    };
    // All required flags plus --sample-limit round-trip into Args.
    #[test]
    fn parse_args_accepts_required_flags() {
        let args = parse_args(&[
            "cir_probe_rpki_client_cache".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--rpki-client-cir".to_string(),
            "peer.cir".to_string(),
            "--cache-root".to_string(),
            "cache".to_string(),
            "--out-json".to_string(),
            "probe.json".to_string(),
            "--sample-limit".to_string(),
            "7".to_string(),
        ])
        .expect("parse args");
        assert_eq!(args.ours_cir.as_deref(), Some(Path::new("ours.cir")));
        assert_eq!(args.peer_cir.as_deref(), Some(Path::new("peer.cir")));
        assert_eq!(args.cache_root.as_deref(), Some(Path::new("cache")));
        assert_eq!(args.out_json.as_deref(), Some(Path::new("probe.json")));
        assert_eq!(args.sample_limit, 7);
    }
    // Missing required flags, malformed --sample-limit, and unknown flags all
    // produce targeted errors.
    #[test]
    fn parse_args_rejects_missing_invalid_and_unknown_flags() {
        let missing = parse_args(&[
            "cir_probe_rpki_client_cache".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--peer-cir".to_string(),
            "peer.cir".to_string(),
            "--cache-root".to_string(),
            "cache".to_string(),
        ])
        .unwrap_err();
        assert!(missing.contains("--out-json is required"), "{missing}");
        let invalid_limit = parse_args(&[
            "cir_probe_rpki_client_cache".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--peer-cir".to_string(),
            "peer.cir".to_string(),
            "--cache-root".to_string(),
            "cache".to_string(),
            "--out-json".to_string(),
            "probe.json".to_string(),
            "--sample-limit".to_string(),
            "NaN".to_string(),
        ])
        .unwrap_err();
        assert!(
            invalid_limit.contains("invalid --sample-limit"),
            "{invalid_limit}"
        );
        let unknown = parse_args(&[
            "cir_probe_rpki_client_cache".to_string(),
            "--unexpected".to_string(),
        ])
        .unwrap_err();
        assert!(unknown.contains("unknown argument"), "{unknown}");
    }
    // End-to-end run over a synthetic cache: two hash matches (validated tree
    // and RRDP session dir), one mismatch in the rsync staging tree, one
    // missing object, and one log mention for the mismatched manifest.
    #[test]
    fn run_probes_valid_temp_rrdp_missing_and_log_mentions() {
        let temp = tempfile::tempdir().expect("tempdir");
        let cache_root = temp.path().join("cache");
        let valid_bytes = b"valid object bytes";
        let rrdp_bytes = b"rrdp object bytes";
        let expected_mismatch_bytes = b"expected mismatch bytes";
        write_cache_file(
            &cache_root.join("rpki.example.test/repo/a.roa"),
            valid_bytes,
        );
        write_cache_file(
            &cache_root.join(".rsync/rpki.example.test/repo/b.mft"),
            b"different bytes",
        );
        write_cache_file(
            &cache_root.join(".rrdp/session-1/rpki.example.test/repo/c.cer"),
            rrdp_bytes,
        );
        let ours_cir = sample_cir(&[
            (
                "rsync://missing.example.test/repo/e.crl",
                b"missing bytes".as_slice(),
            ),
            (
                "rsync://rpki.example.test/repo/a.roa",
                valid_bytes.as_slice(),
            ),
            (
                "rsync://rpki.example.test/repo/b.mft",
                expected_mismatch_bytes.as_slice(),
            ),
            (
                "rsync://rpki.example.test/repo/c.cer",
                rrdp_bytes.as_slice(),
            ),
            (
                "rsync://shared.example.test/repo/d.roa",
                b"shared".as_slice(),
            ),
        ]);
        let peer_cir = sample_cir(&[(
            "rsync://shared.example.test/repo/d.roa",
            b"shared".as_slice(),
        )]);
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        write_cir(&ours, &ours_cir);
        write_cir(&peer, &peer_cir);
        let log = temp.path().join("rpki-client.log");
        std::fs::write(
            &log,
            "rpki-client: deleted superfluous /cache/rpki.example.test/repo/b.mft\n",
        )
        .expect("write log");
        let out_json = temp.path().join("nested/probe.json");
        run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            cache_root: Some(cache_root),
            rpki_client_log: Some(log),
            out_json: Some(out_json.clone()),
            sample_limit: 10,
        })
        .expect("run");
        let summary: serde_json::Value =
            serde_json::from_slice(&std::fs::read(out_json).expect("read summary"))
                .expect("summary json");
        assert_eq!(summary["onlyInOursTotal"], 4);
        assert_eq!(summary["rrdpCacheDirs"], 1);
        assert_eq!(summary["cacheProbe"]["hashMatchCount"], 2);
        assert_eq!(summary["cacheProbe"]["hashMismatchCount"], 1);
        assert_eq!(summary["cacheProbe"]["missingCount"], 1);
        assert_eq!(summary["cacheProbe"]["validPathExistsCount"], 1);
        assert_eq!(summary["cacheProbe"]["rsyncTempPathExistsCount"], 1);
        assert_eq!(summary["cacheProbe"]["rrdpTempPathExistsCount"], 1);
        assert_eq!(summary["logProbe"]["enabled"], true);
        assert_eq!(summary["logProbe"]["relevantLineCount"], 1);
        assert_eq!(summary["logProbe"]["mentionedCount"], 1);
        assert_eq!(summary["samples"].as_array().unwrap().len(), 4);
    }
    // Decode failures name the side that failed ("ours" here).
    #[test]
    fn run_reports_decode_failure_with_side_label() {
        let temp = tempfile::tempdir().expect("tempdir");
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        std::fs::write(&ours, b"not a cir").expect("write invalid");
        write_cir(
            &peer,
            &sample_cir(&[(
                "rsync://shared.example.test/repo/d.roa",
                b"shared".as_slice(),
            )]),
        );
        let err = run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            cache_root: Some(temp.path().join("cache")),
            rpki_client_log: None,
            out_json: Some(temp.path().join("probe.json")),
            sample_limit: 20,
        })
        .unwrap_err();
        assert!(err.contains("decode ours CIR failed"), "{err}");
    }
    // URI helpers: scheme stripping, host extraction, lowercase extension,
    // and file-name extraction.
    #[test]
    fn uri_helpers_extract_rsync_path_host_and_extension() {
        let uri = "rsync://rpki.example.test/repo/a/b.ROA";
        assert_eq!(strip_rsync_uri(uri), Some("rpki.example.test/repo/a/b.ROA"));
        assert_eq!(uri_host(uri), "rpki.example.test");
        assert_eq!(uri_extension(uri), ".roa");
        assert_eq!(file_name("rpki.example.test/repo/a/b.ROA"), "b.ROA");
    }
    // A cleanup line containing the object's stripped path marks its URI.
    #[test]
    fn log_mentions_match_cleanup_lines_by_uri_suffix() {
        let temp = tempfile::tempdir().expect("tempdir");
        let log = temp.path().join("run.log");
        std::fs::write(
            &log,
            "rpki-client: deleted superfluous .rrdp/abc/rpki.example.test/repo/a.roa\n",
        )
        .expect("write log");
        let objects = vec![ProbeObject {
            uri: "rsync://rpki.example.test/repo/a.roa".to_string(),
            sha256_hex: "00".repeat(32),
        }];
        let mentions = build_log_mentions(&log, &objects).expect("mentions");
        assert_eq!(mentions.relevant_line_count, 1);
        assert!(
            mentions
                .mentioned
                .contains("rsync://rpki.example.test/repo/a.roa")
        );
    }
    // Build a v2 CIR holding the given (uri, bytes) objects, sorted by URI.
    fn sample_cir(objects: &[(&str, &[u8])]) -> CanonicalInputRepresentation {
        let mut objects = objects
            .iter()
            .map(|(rsync_uri, bytes)| CirObject {
                rsync_uri: (*rsync_uri).to_string(),
                sha256: Sha256::digest(bytes).to_vec(),
            })
            .collect::<Vec<_>>();
        objects.sort_by(|left, right| left.rsync_uri.cmp(&right.rsync_uri));
        CanonicalInputRepresentation {
            version: CIR_VERSION_V2,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: time::OffsetDateTime::UNIX_EPOCH,
            objects,
            tals: vec![CirTal {
                tal_uri: "https://tal.example.test/apnic.tal".to_string(),
                tal_bytes: b"https://tal.example.test/apnic.tal\nrsync://example.test/ta.cer\nMIIB"
                    .to_vec(),
            }],
            reject_list_sha256: compute_reject_list_sha256(std::iter::empty::<&str>()),
            rejected_objects: Vec::<CirRejectedObject>::new(),
        }
    }
    // Encode a CIR and write it to disk.
    fn write_cir(path: &Path, cir: &CanonicalInputRepresentation) {
        std::fs::write(path, encode_cir(cir).expect("encode cir")).expect("write cir");
    }
    // Create parent directories and write a synthetic cache file.
    fn write_cache_file(path: &Path, bytes: &[u8]) {
        std::fs::create_dir_all(path.parent().expect("parent")).expect("mkdir");
        std::fs::write(path, bytes).expect("write cache file");
    }
}

View File

@ -0,0 +1,742 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use rpki::cir::decode_cir;
use serde_json::json;
/// Parsed command-line options for the CIR state comparison tool.
#[derive(Debug, Default, PartialEq, Eq)]
struct Args {
    /// Path to our CIR file (`--ours-cir`, required).
    ours_cir: Option<PathBuf>,
    /// Path to the rpki-client CIR file (`--rpki-client-cir` / `--peer-cir`, required).
    peer_cir: Option<PathBuf>,
    /// Where to write the JSON summary (`--out-json`, required).
    out_json: Option<PathBuf>,
    /// Optional Markdown rendering of the summary (`--out-md`).
    out_md: Option<PathBuf>,
    /// Cap on sampled URIs per difference list (`--sample-limit`, default 20).
    sample_limit: usize,
}
/// One-line usage synopsis shown alongside argument errors.
fn usage() -> &'static str {
    concat!(
        "Usage: cir_state_compare --ours-cir <path> --rpki-client-cir <path> ",
        "--out-json <path> [--out-md <path>] [--sample-limit <n>]"
    )
}
/// Parse command-line flags into `Args`.
///
/// Flags may appear in any order; each one consumes exactly one value.
/// Returns a usage-bearing error for unknown flags, missing values, a
/// non-numeric `--sample-limit`, or any missing required flag.
fn parse_args(argv: &[String]) -> Result<Args, String> {
    // Defaults: only the sample cap is non-zero out of the box.
    let mut args = Args {
        sample_limit: 20,
        ..Args::default()
    };
    // Walk flags left to right, skipping argv[0] (the program name).
    let mut flags = argv.iter().skip(1);
    while let Some(flag) = flags.next() {
        match flag.as_str() {
            "--ours-cir" => {
                let value = flags.next().ok_or("--ours-cir requires a value")?;
                args.ours_cir = Some(PathBuf::from(value));
            }
            "--rpki-client-cir" | "--peer-cir" => {
                let value = flags.next().ok_or("--rpki-client-cir requires a value")?;
                args.peer_cir = Some(PathBuf::from(value));
            }
            "--out-json" => {
                let value = flags.next().ok_or("--out-json requires a value")?;
                args.out_json = Some(PathBuf::from(value));
            }
            "--out-md" => {
                let value = flags.next().ok_or("--out-md requires a value")?;
                args.out_md = Some(PathBuf::from(value));
            }
            "--sample-limit" => {
                let value = flags.next().ok_or("--sample-limit requires a value")?;
                args.sample_limit = value
                    .parse::<usize>()
                    .map_err(|_| format!("invalid --sample-limit: {value}"))?;
            }
            "-h" | "--help" => return Err(usage().to_string()),
            other => return Err(format!("unknown argument: {other}\n{}", usage())),
        }
    }
    // Reject runs that omit any required flag.
    if args.ours_cir.is_none() {
        return Err(format!("--ours-cir is required\n{}", usage()));
    }
    if args.peer_cir.is_none() {
        return Err(format!("--rpki-client-cir is required\n{}", usage()));
    }
    if args.out_json.is_none() {
        return Err(format!("--out-json is required\n{}", usage()));
    }
    Ok(args)
}
/// Entry point: run the comparison; on failure, print the error to stderr
/// and exit with a non-zero status.
fn main() {
    match real_main() {
        Ok(()) => {}
        Err(message) => {
            eprintln!("{message}");
            std::process::exit(1);
        }
    }
}
/// Parse process arguments and execute the comparison.
fn real_main() -> Result<(), String> {
    run(parse_args(&std::env::args().collect::<Vec<_>>())?)
}
/// Compare two decoded CIRs — object maps, reject sets, TAL sets, and the
/// reject-list hash — then emit a JSON summary (plus optional Markdown).
fn run(args: Args) -> Result<(), String> {
    // parse_args() already rejected missing flags, so these unwraps are safe.
    let ours_path = args.ours_cir.as_ref().expect("validated");
    let peer_path = args.peer_cir.as_ref().expect("validated");
    let ours =
        decode_cir(&read_file(ours_path)?).map_err(|e| format!("decode ours CIR failed: {e}"))?;
    let peer = decode_cir(&read_file(peer_path)?)
        .map_err(|e| format!("decode rpki-client CIR failed: {e}"))?;
    // Object maps: rsync URI -> hex SHA-256, per side.
    let ours_objects = ours
        .objects
        .iter()
        .map(|item| (item.rsync_uri.clone(), hex::encode(&item.sha256)))
        .collect::<BTreeMap<_, _>>();
    let peer_objects = peer
        .objects
        .iter()
        .map(|item| (item.rsync_uri.clone(), hex::encode(&item.sha256)))
        .collect::<BTreeMap<_, _>>();
    // Reject and TAL URI sets, per side.
    let ours_rejects = ours
        .rejected_objects
        .iter()
        .map(|item| item.object_uri.clone())
        .collect::<BTreeSet<_>>();
    let peer_rejects = peer
        .rejected_objects
        .iter()
        .map(|item| item.object_uri.clone())
        .collect::<BTreeSet<_>>();
    let ours_tals = ours
        .tals
        .iter()
        .map(|item| item.tal_uri.clone())
        .collect::<BTreeSet<_>>();
    let peer_tals = peer
        .tals
        .iter()
        .map(|item| item.tal_uri.clone())
        .collect::<BTreeSet<_>>();
    let object_summary = compare_object_maps(&ours_objects, &peer_objects, args.sample_limit);
    let reject_summary = compare_sets(&ours_rejects, &peer_rejects, args.sample_limit);
    let tal_summary = compare_sets(&ours_tals, &peer_tals, args.sample_limit);
    let reject_hash_match = ours.reject_list_sha256 == peer.reject_list_sha256;
    // Overall verdict requires every individual comparison to agree.
    let all_match =
        object_summary.match_ && reject_summary.match_ && tal_summary.match_ && reject_hash_match;
    let summary = json!({
        "allMatch": all_match,
        "objects": object_summary.to_json(),
        "rejects": reject_summary.to_json(),
        "tals": tal_summary.to_json(),
        "rejectListSha256Match": reject_hash_match,
        "ours": {
            "objectCount": ours.objects.len(),
            "talCount": ours.tals.len(),
            "rejectCount": ours.rejected_objects.len(),
            "rejectListSha256": hex::encode(&ours.reject_list_sha256),
            "validationTime": ours.validation_time.to_string(),
        },
        "rpkiClient": {
            "objectCount": peer.objects.len(),
            "talCount": peer.tals.len(),
            "rejectCount": peer.rejected_objects.len(),
            "rejectListSha256": hex::encode(&peer.reject_list_sha256),
            "validationTime": peer.validation_time.to_string(),
        }
    });
    let out_json = args.out_json.as_ref().expect("validated");
    write_json(out_json, &summary)?;
    // Markdown rendering is optional.
    if let Some(out_md) = args.out_md.as_ref() {
        write_markdown(out_md, &summary)?;
    }
    // Print the summary path so callers can locate the artifact.
    println!("{}", out_json.display());
    Ok(())
}
/// Read a file fully, attaching the path to the I/O error text on failure.
fn read_file(path: &Path) -> Result<Vec<u8>, String> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(bytes),
        Err(err) => Err(format!("read file failed: {}: {err}", path.display())),
    }
}
/// Pretty-print `value` to `path`, creating parent directories as needed.
fn write_json(path: &Path, value: &serde_json::Value) -> Result<(), String> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?;
    }
    let bytes = serde_json::to_vec_pretty(value).map_err(|e| e.to_string())?;
    std::fs::write(path, bytes)
        .map_err(|e| format!("write json failed: {}: {e}", path.display()))
}
fn write_markdown(path: &Path, summary: &serde_json::Value) -> Result<(), String> {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.map_err(|e| format!("create parent dirs failed: {}: {e}", parent.display()))?;
}
let lines = vec![
"# CIR State Compare Summary".to_string(),
String::new(),
format!(
"- `allMatch`: `{}`",
summary["allMatch"].as_bool().unwrap_or(false)
),
format!(
"- `objectsMatch`: `{}`",
summary["objects"]["match"].as_bool().unwrap_or(false)
),
format!(
"- `rejectsMatch`: `{}`",
summary["rejects"]["match"].as_bool().unwrap_or(false)
),
format!(
"- `talsMatch`: `{}`",
summary["tals"]["match"].as_bool().unwrap_or(false)
),
format!(
"- `rejectListSha256Match`: `{}`",
summary["rejectListSha256Match"].as_bool().unwrap_or(false)
),
format!(
"- `ours`: objects `{}`, tals `{}`, rejects `{}`",
summary["ours"]["objectCount"].as_u64().unwrap_or(0),
summary["ours"]["talCount"].as_u64().unwrap_or(0),
summary["ours"]["rejectCount"].as_u64().unwrap_or(0)
),
format!(
"- `rpkiClient`: objects `{}`, tals `{}`, rejects `{}`",
summary["rpkiClient"]["objectCount"].as_u64().unwrap_or(0),
summary["rpkiClient"]["talCount"].as_u64().unwrap_or(0),
summary["rpkiClient"]["rejectCount"].as_u64().unwrap_or(0)
),
];
std::fs::write(path, lines.join("\n") + "\n")
.map_err(|e| format!("write markdown failed: {}: {e}", path.display()))
}
/// Comparison result for a pair of string sets (rejects or TALs).
struct SetSummary {
    /// Size of our set.
    ours: usize,
    /// Size of the rpki-client set.
    peer: usize,
    /// True when both sets are exactly equal.
    match_: bool,
    /// Up to `limit` entries present only on our side.
    only_in_ours: Vec<String>,
    /// Up to `limit` entries present only on the rpki-client side.
    only_in_peer: Vec<String>,
}
impl SetSummary {
    /// Render as the JSON object embedded in the summary report.
    fn to_json(&self) -> serde_json::Value {
        json!({
            "ours": self.ours,
            "rpkiClient": self.peer,
            "match": self.match_,
            "onlyInOurs": self.only_in_ours,
            "onlyInRpkiClient": self.only_in_peer,
        })
    }
}
/// Comparison result for the URI -> SHA-256 object maps of both sides.
struct ObjectSummary {
    /// Number of objects in our CIR.
    ours: usize,
    /// Number of objects in the rpki-client CIR.
    peer: usize,
    /// True when both maps (URIs and hashes) are identical.
    match_: bool,
    /// Total URIs present only on our side.
    only_in_ours_count: usize,
    /// Total URIs present only on the rpki-client side.
    only_in_peer_count: usize,
    /// Total shared URIs whose hashes differ.
    hash_mismatch_count: usize,
    /// Sampled (up to `limit`) URIs present only on our side.
    only_in_ours: Vec<String>,
    /// Sampled (up to `limit`) URIs present only on the rpki-client side.
    only_in_peer: Vec<String>,
    /// Sampled (up to `limit`) mismatch records carrying both hashes.
    hash_mismatches: Vec<serde_json::Value>,
    /// One-sided (ours) URI counts grouped by file extension.
    only_in_ours_by_extension: BTreeMap<String, usize>,
    /// One-sided (rpki-client) URI counts grouped by file extension.
    only_in_peer_by_extension: BTreeMap<String, usize>,
    /// Hash-mismatch URI counts grouped by file extension.
    hash_mismatches_by_extension: BTreeMap<String, usize>,
    /// Top hosts among URIs present only on our side.
    only_in_ours_by_host: Vec<serde_json::Value>,
    /// Top hosts among URIs present only on the rpki-client side.
    only_in_peer_by_host: Vec<serde_json::Value>,
    /// Top hosts among hash-mismatch URIs.
    hash_mismatches_by_host: Vec<serde_json::Value>,
}
impl ObjectSummary {
    /// Render as the JSON object embedded in the summary report.
    fn to_json(&self) -> serde_json::Value {
        json!({
            "ours": self.ours,
            "rpkiClient": self.peer,
            "match": self.match_,
            "onlyInOursCount": self.only_in_ours_count,
            "onlyInRpkiClientCount": self.only_in_peer_count,
            "hashMismatchCount": self.hash_mismatch_count,
            "onlyInOurs": self.only_in_ours,
            "onlyInRpkiClient": self.only_in_peer,
            "hashMismatches": self.hash_mismatches,
            "onlyInOursByExtension": self.only_in_ours_by_extension,
            "onlyInRpkiClientByExtension": self.only_in_peer_by_extension,
            "hashMismatchesByExtension": self.hash_mismatches_by_extension,
            "onlyInOursByHostTop": self.only_in_ours_by_host,
            "onlyInRpkiClientByHostTop": self.only_in_peer_by_host,
            "hashMismatchesByHostTop": self.hash_mismatches_by_host,
        })
    }
}
/// Compare two string sets, sampling up to `limit` entries from each
/// one-sided difference; equality is exact set equality.
fn compare_sets(ours: &BTreeSet<String>, peer: &BTreeSet<String>, limit: usize) -> SetSummary {
    let only_in_ours: Vec<String> = ours.difference(peer).take(limit).cloned().collect();
    let only_in_peer: Vec<String> = peer.difference(ours).take(limit).cloned().collect();
    SetSummary {
        ours: ours.len(),
        peer: peer.len(),
        match_: ours == peer,
        only_in_ours,
        only_in_peer,
    }
}
/// Compares two URI -> sha256-hex maps and summarizes the differences.
///
/// `limit` bounds the number of sample entries kept for each category
/// (one-sided URIs and hash-mismatch records); the `*_count` fields and
/// the per-extension / per-host groupings always cover the full difference.
fn compare_object_maps(
    ours: &BTreeMap<String, String>,
    peer: &BTreeMap<String, String>,
    limit: usize,
) -> ObjectSummary {
    let ours_keys = ours.keys().cloned().collect::<BTreeSet<_>>();
    let peer_keys = peer.keys().cloned().collect::<BTreeSet<_>>();
    // Compute each one-sided difference once; the samples are prefixes of
    // the (sorted) full lists, so this matches walking the set difference a
    // second time with `take(limit)` as the original code did.
    let only_in_ours_all = ours_keys
        .difference(&peer_keys)
        .cloned()
        .collect::<Vec<_>>();
    let only_in_peer_all = peer_keys
        .difference(&ours_keys)
        .cloned()
        .collect::<Vec<_>>();
    let only_in_ours = only_in_ours_all.iter().take(limit).cloned().collect();
    let only_in_peer = only_in_peer_all.iter().take(limit).cloned().collect();
    let mut hash_mismatches = Vec::new();
    let mut hash_mismatch_uris = Vec::new();
    // For keys present on both sides, flag entries whose hashes disagree.
    for key in ours_keys.intersection(&peer_keys) {
        let ours_hash = ours.get(key).expect("key from map");
        let peer_hash = peer.get(key).expect("key from map");
        if ours_hash != peer_hash {
            hash_mismatch_uris.push(key.clone());
            // Keep at most `limit` detailed mismatch records.
            if hash_mismatches.len() < limit {
                hash_mismatches.push(json!({
                    "uri": key,
                    "oursSha256": ours_hash,
                    "rpkiClientSha256": peer_hash,
                }));
            }
        }
    }
    ObjectSummary {
        ours: ours.len(),
        peer: peer.len(),
        // Full map equality (keys AND hashes), not just key-set equality.
        match_: ours == peer,
        only_in_ours_count: only_in_ours_all.len(),
        only_in_peer_count: only_in_peer_all.len(),
        hash_mismatch_count: hash_mismatch_uris.len(),
        only_in_ours,
        only_in_peer,
        hash_mismatches,
        only_in_ours_by_extension: group_by_extension(only_in_ours_all.iter().map(String::as_str)),
        only_in_peer_by_extension: group_by_extension(only_in_peer_all.iter().map(String::as_str)),
        hash_mismatches_by_extension: group_by_extension(
            hash_mismatch_uris.iter().map(String::as_str),
        ),
        only_in_ours_by_host: top_hosts(only_in_ours_all.iter().map(String::as_str), limit),
        only_in_peer_by_host: top_hosts(only_in_peer_all.iter().map(String::as_str), limit),
        hash_mismatches_by_host: top_hosts(hash_mismatch_uris.iter().map(String::as_str), limit),
    }
}
/// Counts URIs per lowercase file extension (as classified by
/// `uri_extension`), keyed in sorted order.
fn group_by_extension<'a>(uris: impl IntoIterator<Item = &'a str>) -> BTreeMap<String, usize> {
    let mut counts: BTreeMap<String, usize> = BTreeMap::new();
    for uri in uris.into_iter() {
        counts
            .entry(uri_extension(uri))
            .and_modify(|n| *n += 1)
            .or_insert(1);
    }
    counts
}
/// Returns at most `limit` {host, count} JSON rows, ordered by descending
/// count and then ascending host name for deterministic output.
fn top_hosts<'a>(uris: impl IntoIterator<Item = &'a str>, limit: usize) -> Vec<serde_json::Value> {
    let mut counts: BTreeMap<String, usize> = BTreeMap::new();
    for uri in uris {
        let slot = counts.entry(uri_host(uri)).or_insert(0usize);
        *slot += 1;
    }
    let mut rows: Vec<(String, usize)> = counts.into_iter().collect();
    // Highest count first; ties broken alphabetically by host.
    rows.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
    rows.truncate(limit);
    rows.into_iter()
        .map(|(host, count)| {
            json!({
                "host": host,
                "count": count,
            })
        })
        .collect()
}
/// Extracts the host component of a URI: the text between the optional
/// `scheme://` prefix and the first `/`. Returns `"<unknown>"` when the
/// host segment is empty.
fn uri_host(uri: &str) -> String {
    let rest = match uri.split_once("://") {
        Some((_, after_scheme)) => after_scheme,
        None => uri,
    };
    let host = match rest.find('/') {
        Some(idx) => &rest[..idx],
        None => rest,
    };
    if host.is_empty() {
        "<unknown>".to_string()
    } else {
        host.to_string()
    }
}
/// Classifies a URI by the lowercase extension of its final path segment
/// (e.g. `".roa"`), or `"<none>"` when the segment has no dot suffix.
fn uri_extension(uri: &str) -> String {
    // Drop the scheme, then the host (everything up to the first '/').
    let rest = uri.split_once("://").map_or(uri, |(_, r)| r);
    let path = rest.split_once('/').map_or(rest, |(_, p)| p);
    // Last path segment; rsplit always yields at least one item.
    let file = path.rsplit('/').next().unwrap_or(path);
    match file.rsplit_once('.') {
        Some((_, ext)) if !ext.is_empty() => format!(".{}", ext.to_ascii_lowercase()),
        _ => "<none>".to_string(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use rpki::cir::{
        CIR_VERSION_V2, CanonicalInputRepresentation, CirHashAlgorithm, CirObject,
        CirRejectedObject, CirTal, compute_reject_list_sha256, encode_cir,
    };
    // All required CLI flags plus --sample-limit land in the expected Args
    // fields (paths and the parsed numeric limit).
    #[test]
    fn parse_args_accepts_required_flags() {
        let args = parse_args(&[
            "cir_state_compare".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--rpki-client-cir".to_string(),
            "peer.cir".to_string(),
            "--out-json".to_string(),
            "summary.json".to_string(),
            "--sample-limit".to_string(),
            "3".to_string(),
        ])
        .expect("parse args");
        assert_eq!(args.ours_cir.as_deref(), Some(Path::new("ours.cir")));
        assert_eq!(args.peer_cir.as_deref(), Some(Path::new("peer.cir")));
        assert_eq!(args.out_json.as_deref(), Some(Path::new("summary.json")));
        assert_eq!(args.sample_limit, 3);
    }
    // A missing peer flag, a non-numeric --sample-limit, and an unknown flag
    // are each rejected with a distinct error message.
    #[test]
    fn parse_args_rejects_missing_and_invalid_flags() {
        let missing_peer = parse_args(&[
            "cir_state_compare".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--out-json".to_string(),
            "summary.json".to_string(),
        ])
        .unwrap_err();
        assert!(
            missing_peer.contains("--rpki-client-cir is required"),
            "{missing_peer}"
        );
        let invalid_limit = parse_args(&[
            "cir_state_compare".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--peer-cir".to_string(),
            "peer.cir".to_string(),
            "--out-json".to_string(),
            "summary.json".to_string(),
            "--sample-limit".to_string(),
            "not-a-number".to_string(),
        ])
        .unwrap_err();
        assert!(
            invalid_limit.contains("invalid --sample-limit"),
            "{invalid_limit}"
        );
        let unknown = parse_args(&[
            "cir_state_compare".to_string(),
            "--ours-cir".to_string(),
            "ours.cir".to_string(),
            "--peer-cir".to_string(),
            "peer.cir".to_string(),
            "--out-json".to_string(),
            "summary.json".to_string(),
            "--unexpected".to_string(),
        ])
        .unwrap_err();
        assert!(unknown.contains("unknown argument"), "{unknown}");
    }
    // Identical CIRs on both sides: every section reports match=true and both
    // outputs are written, including creation of the nested parent directory.
    #[test]
    fn run_reports_identical_cirs_and_writes_outputs() {
        let temp = tempfile::tempdir().expect("tempdir");
        let cir = sample_cir(
            &[
                ("rsync://example.net/repo/a.cer", 0x11),
                ("rsync://example.net/repo/b.roa", 0x22),
            ],
            &["https://tal.example.net/apnic.tal"],
            &[("rsync://example.net/repo/rejected.roa", Some("bad roa"))],
        );
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        write_cir(&ours, &cir);
        write_cir(&peer, &cir);
        let out_json = temp.path().join("nested/summary.json");
        let out_md = temp.path().join("nested/summary.md");
        run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            out_json: Some(out_json.clone()),
            out_md: Some(out_md.clone()),
            sample_limit: 10,
        })
        .expect("run");
        let summary: serde_json::Value =
            serde_json::from_slice(&std::fs::read(out_json).expect("read summary"))
                .expect("summary json");
        assert_eq!(summary["allMatch"], true);
        assert_eq!(summary["objects"]["match"], true);
        assert_eq!(summary["rejects"]["match"], true);
        assert_eq!(summary["tals"]["match"], true);
        assert_eq!(summary["rejectListSha256Match"], true);
        assert_eq!(summary["ours"]["objectCount"], 2);
        assert!(
            std::fs::read_to_string(out_md)
                .expect("read markdown")
                .contains("allMatch")
        );
    }
    // CIRs differing in objects (one-sided URIs plus a hash mismatch on
    // a.roa), rejects, and TALs: every mismatch flag is set and each sample
    // list is truncated to sample_limit (1 here).
    #[test]
    fn run_reports_object_reject_tal_differences_with_samples() {
        let temp = tempfile::tempdir().expect("tempdir");
        let ours_cir = sample_cir(
            &[
                ("rsync://example.net/repo/a.roa", 0x11),
                ("rsync://example.net/repo/c.roa", 0x33),
            ],
            &["https://tal.example.net/apnic.tal"],
            &[("rsync://example.net/repo/rejected-a.roa", Some("ours"))],
        );
        let peer_cir = sample_cir(
            &[
                ("rsync://example.net/repo/a.roa", 0x22),
                ("rsync://example.net/repo/b.roa", 0x44),
            ],
            &["https://tal.example.net/arin.tal"],
            &[("rsync://example.net/repo/rejected-b.roa", Some("peer"))],
        );
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        write_cir(&ours, &ours_cir);
        write_cir(&peer, &peer_cir);
        let out_json = temp.path().join("summary.json");
        run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            out_json: Some(out_json.clone()),
            out_md: None,
            sample_limit: 1,
        })
        .expect("run");
        let summary: serde_json::Value =
            serde_json::from_slice(&std::fs::read(out_json).expect("read summary"))
                .expect("summary json");
        assert_eq!(summary["allMatch"], false);
        assert_eq!(summary["objects"]["match"], false);
        assert_eq!(
            summary["objects"]["onlyInOurs"].as_array().unwrap().len(),
            1
        );
        assert_eq!(
            summary["objects"]["onlyInRpkiClient"]
                .as_array()
                .unwrap()
                .len(),
            1
        );
        assert_eq!(
            summary["objects"]["hashMismatches"]
                .as_array()
                .unwrap()
                .len(),
            1
        );
        assert_eq!(summary["rejects"]["match"], false);
        assert_eq!(summary["tals"]["match"], false);
        assert_eq!(summary["rejectListSha256Match"], false);
    }
    // Reject *reasons* are informational only: identical reject URIs with
    // different reason strings still compare as matching.
    #[test]
    fn run_ignores_reject_reasons_for_state_comparison() {
        let temp = tempfile::tempdir().expect("tempdir");
        let ours_cir = sample_cir(
            &[("rsync://example.net/repo/a.roa", 0x11)],
            &["https://tal.example.net/apnic.tal"],
            &[("rsync://example.net/repo/rejected.roa", Some("reason-a"))],
        );
        let peer_cir = sample_cir(
            &[("rsync://example.net/repo/a.roa", 0x11)],
            &["https://tal.example.net/apnic.tal"],
            &[("rsync://example.net/repo/rejected.roa", Some("reason-b"))],
        );
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        write_cir(&ours, &ours_cir);
        write_cir(&peer, &peer_cir);
        let out_json = temp.path().join("summary.json");
        run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            out_json: Some(out_json.clone()),
            out_md: None,
            sample_limit: 20,
        })
        .expect("run");
        let summary: serde_json::Value =
            serde_json::from_slice(&std::fs::read(out_json).expect("read summary"))
                .expect("summary json");
        assert_eq!(summary["allMatch"], true);
        assert_eq!(summary["rejects"]["match"], true);
        assert_eq!(summary["rejectListSha256Match"], true);
    }
    // A CIR that fails to decode produces an error naming which side
    // ("ours" here) could not be decoded.
    #[test]
    fn run_reports_decode_failure_with_side_label() {
        let temp = tempfile::tempdir().expect("tempdir");
        let ours = temp.path().join("ours.cir");
        let peer = temp.path().join("peer.cir");
        std::fs::write(&ours, b"not der").expect("write invalid");
        write_cir(
            &peer,
            &sample_cir(
                &[("rsync://example.net/repo/a.roa", 0x11)],
                &["https://tal.example.net/apnic.tal"],
                &[],
            ),
        );
        let err = run(Args {
            ours_cir: Some(ours),
            peer_cir: Some(peer),
            out_json: Some(temp.path().join("summary.json")),
            out_md: None,
            sample_limit: 20,
        })
        .unwrap_err();
        assert!(err.contains("decode ours CIR failed"), "{err}");
    }
    // Unit-level check of compare_object_maps: one-sided keys on each side,
    // a single hash mismatch, and the per-extension grouping of each bucket.
    #[test]
    fn compare_object_maps_reports_hash_mismatch() {
        let ours = BTreeMap::from([
            ("rsync://example/a.roa".to_string(), "11".repeat(32)),
            ("rsync://example/b.mft".to_string(), "33".repeat(32)),
        ]);
        let peer = BTreeMap::from([
            ("rsync://example/a.roa".to_string(), "22".repeat(32)),
            ("rsync://peer.example/c.crl".to_string(), "44".repeat(32)),
        ]);
        let summary = compare_object_maps(&ours, &peer, 20);
        assert!(!summary.match_);
        assert_eq!(summary.only_in_ours_count, 1);
        assert_eq!(summary.only_in_peer_count, 1);
        assert_eq!(summary.hash_mismatch_count, 1);
        assert_eq!(summary.hash_mismatches.len(), 1);
        assert_eq!(
            summary.only_in_ours_by_extension.get(".mft").copied(),
            Some(1)
        );
        assert_eq!(
            summary.only_in_peer_by_extension.get(".crl").copied(),
            Some(1)
        );
        assert_eq!(
            summary.hash_mismatches_by_extension.get(".roa").copied(),
            Some(1)
        );
    }
    // group_by_extension lowercases extensions and buckets dot-less names as
    // "<none>"; top_hosts orders by count and honors the limit.
    #[test]
    fn uri_grouping_handles_hosts_extensions_and_limit() {
        let uris = [
            "rsync://b.example/repo/a.ROA",
            "rsync://a.example/repo/b.mft",
            "rsync://b.example/repo/c",
        ];
        let by_extension = group_by_extension(uris.iter().copied());
        assert_eq!(by_extension.get(".roa").copied(), Some(1));
        assert_eq!(by_extension.get(".mft").copied(), Some(1));
        assert_eq!(by_extension.get("<none>").copied(), Some(1));
        let top = top_hosts(uris.iter().copied(), 1);
        assert_eq!(top.len(), 1);
        assert_eq!(top[0]["host"], "b.example");
        assert_eq!(top[0]["count"], 2);
    }
    // Fixed validation time so encoded CIR fixtures are deterministic.
    fn sample_time() -> time::OffsetDateTime {
        time::OffsetDateTime::UNIX_EPOCH
    }
    // Builds a CIR v2 fixture: objects as (rsync URI, hash fill byte) pairs,
    // TAL URIs with synthetic TAL bytes, and rejected objects with optional
    // reasons (reject_list_sha256 is derived from the reject URIs only).
    fn sample_cir(
        objects: &[(&str, u8)],
        tals: &[&str],
        rejected_objects: &[(&str, Option<&str>)],
    ) -> CanonicalInputRepresentation {
        let rejected_objects = rejected_objects
            .iter()
            .map(|(object_uri, reason)| CirRejectedObject {
                object_uri: (*object_uri).to_string(),
                reason: reason.map(str::to_string),
            })
            .collect::<Vec<_>>();
        CanonicalInputRepresentation {
            version: CIR_VERSION_V2,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: objects
                .iter()
                .map(|(rsync_uri, fill)| CirObject {
                    rsync_uri: (*rsync_uri).to_string(),
                    sha256: vec![*fill; 32],
                })
                .collect(),
            tals: tals
                .iter()
                .map(|tal_uri| CirTal {
                    tal_uri: (*tal_uri).to_string(),
                    tal_bytes: format!("{tal_uri}\nrsync://example.net/repo/ta.cer\nMIIB")
                        .into_bytes(),
                })
                .collect(),
            reject_list_sha256: compute_reject_list_sha256(
                rejected_objects.iter().map(|item| item.object_uri.as_str()),
            ),
            rejected_objects,
        }
    }
    // Encodes a CIR fixture to DER and writes it to disk.
    fn write_cir(path: &Path, cir: &CanonicalInputRepresentation) {
        std::fs::write(path, encode_cir(cir).expect("encode cir")).expect("write cir");
    }
}

View File

@ -11,6 +11,7 @@ pub mod encode;
pub mod export;
pub mod hash;
pub mod model;
pub mod state_digest;
#[cfg(feature = "full")]
pub mod verify;
@ -41,6 +42,10 @@ pub use model::{
ManifestState, RoaPayloadSet, RoaPayloadState, RouterKey, RouterKeySet, RouterKeyState,
RpkiCanonicalCacheRepresentation, TrustAnchorState,
};
pub use state_digest::{
CcrStateDigestComparison, CcrStateDigestError, CcrStateDigestStateComparison,
CcrStateDigestSummary, compare_state_digests, decode_state_digest_summary,
};
#[cfg(feature = "full")]
pub use verify::{
CcrVerifyError, CcrVerifySummary, extract_vrp_rows, verify_against_report_json_path,

332
src/ccr/state_digest.rs Normal file
View File

@ -0,0 +1,332 @@
use crate::data_model::common::DerReader;
use crate::data_model::oid::{OID_CT_RPKI_CCR_RAW, OID_SHA256_RAW};
/// Hash-level view of a CCR: the version, the digest algorithm OID, and the
/// per-state hashes. A state field is `None` when the CCR omits that state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CcrStateDigestSummary {
    /// CCR version (defaults to CCR_VERSION_V0 when the EXPLICIT field is absent).
    pub version: u32,
    /// Digest algorithm OID string (the decoder only accepts SHA-256).
    pub hash_alg_oid: String,
    /// Manifest-state hash, when present.
    pub mfts: Option<Vec<u8>>,
    /// VRP-state hash, when present.
    pub vrps: Option<Vec<u8>>,
    /// ASPA-payload-state hash, when present.
    pub vaps: Option<Vec<u8>>,
    /// Trust-anchor-state hash, when present.
    pub tas: Option<Vec<u8>>,
    /// Router-key-state hash, when present.
    pub rks: Option<Vec<u8>>,
}
/// Result of comparing two CCR digest summaries ("ours" vs a peer), with a
/// per-state breakdown in `states`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CcrStateDigestComparison {
    /// Decoded summary of our CCR.
    pub ours: CcrStateDigestSummary,
    /// Decoded summary of the peer's CCR.
    pub peer: CcrStateDigestSummary,
    /// One entry per state slot (mfts/vrps/vaps/tas/rks).
    pub states: Vec<CcrStateDigestStateComparison>,
}
impl CcrStateDigestComparison {
    /// True when version, digest algorithm, and every per-state entry agree.
    /// Note that producedAt is not part of the comparison.
    pub fn matches(&self) -> bool {
        let headers_equal = self.ours.version == self.peer.version
            && self.ours.hash_alg_oid == self.peer.hash_alg_oid;
        headers_equal && !self.states.iter().any(|state| !state.matches)
    }
    /// Names of the state slots that disagree, in CCR field order.
    pub fn mismatched_state_names(&self) -> Vec<&'static str> {
        self.states
            .iter()
            .filter_map(|state| (!state.matches).then_some(state.name))
            .collect()
    }
}
/// Per-state comparison row: presence on each side, hex-encoded hashes for
/// reporting, and whether the two sides agree (absent == absent counts as a
/// match).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CcrStateDigestStateComparison {
    /// Fixed state name: "mfts", "vrps", "vaps", "tas", or "rks".
    pub name: &'static str,
    /// Whether our CCR carries this state.
    pub ours_present: bool,
    /// Whether the peer CCR carries this state.
    pub peer_present: bool,
    /// Hex-encoded hash on our side, when present.
    pub ours_hash_hex: Option<String>,
    /// Hex-encoded hash on the peer side, when present.
    pub peer_hash_hex: Option<String>,
    /// True when both sides have the same presence and hash bytes.
    pub matches: bool,
}
/// Errors produced while decoding a CCR for state-digest comparison.
#[derive(Debug, thiserror::Error)]
pub enum CcrStateDigestError {
    /// Malformed DER structure; the message describes the failing field.
    #[error("DER parse error: {0}")]
    Parse(String),
    /// ContentInfo contentType is not the RPKI CCR OID.
    #[error("unexpected contentType OID")]
    UnexpectedContentType,
    /// Digest algorithm is not SHA-256.
    #[error("unexpected digest algorithm OID")]
    UnexpectedDigestAlgorithm,
    /// A CCR field carried a context tag outside the known 0xA1..0xA5 range.
    #[error("unexpected CCR field tag 0x{0:02X}")]
    UnexpectedCcrField(u8),
}
/// Decodes just enough of a DER-encoded CCR ContentInfo to recover the
/// per-state digests, without building the full CCR model.
///
/// # Errors
/// Returns `CcrStateDigestError` on malformed DER, a wrong contentType OID,
/// or trailing bytes at any level.
pub fn decode_state_digest_summary(
    der: &[u8],
) -> Result<CcrStateDigestSummary, CcrStateDigestError> {
    let mut reader = DerReader::new(der);
    let mut content_info = reader.take_sequence().map_err(CcrStateDigestError::Parse)?;
    // Nothing may follow the outer ContentInfo SEQUENCE.
    if !reader.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing bytes after ContentInfo".into(),
        ));
    }
    // contentType must be the RPKI CCR OID.
    let content_type = content_info
        .take_tag(0x06)
        .map_err(CcrStateDigestError::Parse)?;
    if content_type != OID_CT_RPKI_CCR_RAW {
        return Err(CcrStateDigestError::UnexpectedContentType);
    }
    // content is carried in an EXPLICIT [0] wrapper.
    let wrapped = content_info
        .take_tag(0xA0)
        .map_err(CcrStateDigestError::Parse)?;
    if !content_info.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing fields in ContentInfo".into(),
        ));
    }
    decode_ccr_state_digest_summary(wrapped)
}
/// Decodes both CCRs and compares the five state-hash slots pairwise.
/// producedAt is not decoded into the summary and therefore never affects
/// the comparison.
pub fn compare_state_digests(
    ours_der: &[u8],
    peer_der: &[u8],
) -> Result<CcrStateDigestComparison, CcrStateDigestError> {
    let ours = decode_state_digest_summary(ours_der)?;
    let peer = decode_state_digest_summary(peer_der)?;
    let states = {
        // Pair the slots by name, in CCR field order.
        let slots = [
            ("mfts", &ours.mfts, &peer.mfts),
            ("vrps", &ours.vrps, &peer.vrps),
            ("vaps", &ours.vaps, &peer.vaps),
            ("tas", &ours.tas, &peer.tas),
            ("rks", &ours.rks, &peer.rks),
        ];
        slots
            .into_iter()
            .map(|(name, lhs, rhs)| compare_state(name, lhs, rhs))
            .collect::<Vec<_>>()
    };
    Ok(CcrStateDigestComparison { ours, peer, states })
}
// Decodes the inner CCR SEQUENCE (already unwrapped from ContentInfo) into a
// digest-only summary. Layout handled here:
//   * optional EXPLICIT [0] version — defaults to CCR_VERSION_V0 when absent
//   * DigestAlgorithmIdentifier SEQUENCE — must name SHA-256
//   * producedAt GeneralizedTime (tag 0x18) — consumed but discarded, so
//     comparisons ignore it
//   * context tags [1]..[5] carrying the mfts/vrps/vaps/tas/rks states
fn decode_ccr_state_digest_summary(
    der: &[u8],
) -> Result<CcrStateDigestSummary, CcrStateDigestError> {
    let mut top = DerReader::new(der);
    let mut seq = top.take_sequence().map_err(CcrStateDigestError::Parse)?;
    if !top.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing bytes after CCR".into(),
        ));
    }
    // Version is optional: only present when the next tag is EXPLICIT [0].
    let version = if !seq.is_empty() && seq.peek_tag().map_err(CcrStateDigestError::Parse)? == 0xA0
    {
        let explicit = seq.take_tag(0xA0).map_err(CcrStateDigestError::Parse)?;
        let mut inner = DerReader::new(explicit);
        // NOTE(review): u64 -> u32 truncating cast; assumes versions fit in
        // 32 bits.
        let version = inner.take_uint_u64().map_err(CcrStateDigestError::Parse)? as u32;
        if !inner.is_empty() {
            return Err(CcrStateDigestError::Parse(
                "trailing bytes inside CCR version EXPLICIT".into(),
            ));
        }
        version
    } else {
        crate::ccr::model::CCR_VERSION_V0
    };
    let hash_alg_oid =
        decode_digest_algorithm_oid(seq.take_sequence().map_err(CcrStateDigestError::Parse)?)?;
    // producedAt (GeneralizedTime): consumed so parsing can continue, but the
    // value is intentionally not part of the summary.
    let _produced_at = seq.take_tag(0x18).map_err(CcrStateDigestError::Parse)?;
    let mut mfts = None;
    let mut vrps = None;
    let mut vaps = None;
    let mut tas = None;
    let mut rks = None;
    // Remaining fields are context-tagged states; mfts carries an extra
    // timestamp before its hash (StateShape::Manifest), the others do not.
    while !seq.is_empty() {
        let (tag, value) = seq.take_any().map_err(CcrStateDigestError::Parse)?;
        match tag {
            0xA1 => mfts = Some(read_state_hash(value, StateShape::Manifest)?),
            0xA2 => vrps = Some(read_state_hash(value, StateShape::PayloadThenHash)?),
            0xA3 => vaps = Some(read_state_hash(value, StateShape::PayloadThenHash)?),
            0xA4 => tas = Some(read_state_hash(value, StateShape::PayloadThenHash)?),
            0xA5 => rks = Some(read_state_hash(value, StateShape::PayloadThenHash)?),
            other => return Err(CcrStateDigestError::UnexpectedCcrField(other)),
        }
    }
    Ok(CcrStateDigestSummary {
        version,
        hash_alg_oid,
        mfts,
        vrps,
        vaps,
        tas,
        rks,
    })
}
/// Distinguishes the two DER layouts of a CCR state entry as consumed by
/// `read_state_hash`.
#[derive(Clone, Copy)]
enum StateShape {
    /// Payload, then a GeneralizedTime, then the hash (mfts state).
    Manifest,
    /// Payload, then the hash directly (vrps/vaps/tas/rks states).
    PayloadThenHash,
}
// Extracts the SHA-256 state hash from one EXPLICIT-wrapped CCR state.
// The payload itself is skipped unparsed; only the trailing OCTET STRING
// hash is returned, and it must be exactly DIGEST_LEN_SHA256 bytes.
fn read_state_hash(explicit_der: &[u8], shape: StateShape) -> Result<Vec<u8>, CcrStateDigestError> {
    let mut outer = DerReader::new(explicit_der);
    let mut seq = outer.take_sequence().map_err(CcrStateDigestError::Parse)?;
    if !outer.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing bytes after CCR state".into(),
        ));
    }
    match shape {
        StateShape::Manifest => {
            // Skip the payload, then the GeneralizedTime that only the
            // manifest state carries before its hash.
            seq.skip_any().map_err(CcrStateDigestError::Parse)?;
            seq.take_tag(0x18).map_err(CcrStateDigestError::Parse)?;
        }
        StateShape::PayloadThenHash => {
            // Skip the payload; the hash follows immediately.
            seq.skip_any().map_err(CcrStateDigestError::Parse)?;
        }
    }
    let hash = seq
        .take_octet_string()
        .map_err(CcrStateDigestError::Parse)?
        .to_vec();
    if hash.len() != crate::ccr::model::DIGEST_LEN_SHA256 {
        return Err(CcrStateDigestError::Parse(format!(
            "state hash must be {} bytes, got {}",
            crate::ccr::model::DIGEST_LEN_SHA256,
            hash.len()
        )));
    }
    if !seq.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing fields after CCR state hash".into(),
        ));
    }
    Ok(hash)
}
/// Validates a DigestAlgorithmIdentifier SEQUENCE: the OID must be SHA-256,
/// and the optional parameters field, if present, must be an empty NULL.
/// Returns the SHA-256 OID string on success.
fn decode_digest_algorithm_oid(mut seq: DerReader<'_>) -> Result<String, CcrStateDigestError> {
    let oid_raw = seq.take_tag(0x06).map_err(CcrStateDigestError::Parse)?;
    if oid_raw != OID_SHA256_RAW {
        return Err(CcrStateDigestError::UnexpectedDigestAlgorithm);
    }
    // Consume the optional NULL parameters when the next tag is 0x05.
    if !seq.is_empty() && seq.peek_tag().map_err(CcrStateDigestError::Parse)? == 0x05 {
        let params = seq.take_tag(0x05).map_err(CcrStateDigestError::Parse)?;
        if !params.is_empty() {
            return Err(CcrStateDigestError::Parse(
                "AlgorithmIdentifier NULL parameters must be empty".into(),
            ));
        }
    }
    // Anything left (including a non-NULL parameters field) is rejected.
    if !seq.is_empty() {
        return Err(CcrStateDigestError::Parse(
            "trailing fields in DigestAlgorithmIdentifier".into(),
        ));
    }
    Ok(crate::data_model::oid::OID_SHA256.to_string())
}
/// Builds the comparison row for one named state slot. Two absent states
/// compare equal; presence mismatch or differing bytes compare unequal.
fn compare_state(
    name: &'static str,
    ours: &Option<Vec<u8>>,
    peer: &Option<Vec<u8>>,
) -> CcrStateDigestStateComparison {
    let ours_hash_hex = ours.as_ref().map(hex::encode);
    let peer_hash_hex = peer.as_ref().map(hex::encode);
    CcrStateDigestStateComparison {
        name,
        ours_present: ours.is_some(),
        peer_present: peer.is_some(),
        matches: ours == peer,
        ours_hash_hex,
        peer_hash_hex,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ccr::{
        CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation,
        build_aspa_payload_state, build_roa_payload_state, encode_content_info,
    };
    use crate::data_model::roa::{IpPrefix, RoaAfi};
    use crate::validation::objects::{AspaAttestation, Vrp};
    // Minimal CCR ContentInfo fixture: one VRP state and one ASPA payload
    // state; mfts/tas/rks are intentionally absent.
    fn sample_content(produced_at: time::OffsetDateTime) -> CcrContentInfo {
        let vrps = build_roa_payload_state(&[Vrp {
            asn: 64496,
            prefix: IpPrefix {
                afi: RoaAfi::Ipv4,
                prefix_len: 24,
                addr: [192, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            },
            max_length: 24,
        }])
        .expect("build vrps");
        let vaps = build_aspa_payload_state(&[AspaAttestation {
            customer_as_id: 64496,
            provider_as_ids: vec![64497],
        }])
        .expect("build vaps");
        CcrContentInfo::new(RpkiCanonicalCacheRepresentation {
            version: 0,
            hash_alg: CcrDigestAlgorithm::Sha256,
            produced_at,
            mfts: None,
            vrps: Some(vrps),
            vaps: Some(vaps),
            tas: None,
            rks: None,
        })
    }
    // The digest-only decoder recovers the same per-state hashes the full
    // model carries, and reports absent states as None.
    #[test]
    fn decode_state_digest_summary_extracts_hashes_without_full_model() {
        let content = sample_content(time::OffsetDateTime::UNIX_EPOCH);
        let der = encode_content_info(&content).expect("encode");
        let summary = decode_state_digest_summary(&der).expect("summary");
        assert_eq!(summary.version, 0);
        assert_eq!(summary.hash_alg_oid, crate::data_model::oid::OID_SHA256);
        assert_eq!(
            summary.vrps.as_deref(),
            content
                .content
                .vrps
                .as_ref()
                .map(|state| state.hash.as_slice())
        );
        assert_eq!(
            summary.vaps.as_deref(),
            content
                .content
                .vaps
                .as_ref()
                .map(|state| state.hash.as_slice())
        );
        assert!(summary.mfts.is_none());
        assert!(summary.tas.is_none());
        assert!(summary.rks.is_none());
    }
    // Two CCRs differing only in producedAt still compare as matching.
    #[test]
    fn compare_state_digests_ignores_produced_at() {
        let first = encode_content_info(&sample_content(time::OffsetDateTime::UNIX_EPOCH))
            .expect("encode first");
        let second = encode_content_info(&sample_content(
            time::OffsetDateTime::UNIX_EPOCH + time::Duration::seconds(60),
        ))
        .expect("encode second");
        let comparison = compare_state_digests(&first, &second).expect("compare");
        assert!(comparison.matches());
        assert!(comparison.mismatched_state_names().is_empty());
    }
    // Corrupting one byte of the vaps hash flips the overall match and names
    // exactly that state as mismatched.
    #[test]
    fn compare_state_digests_reports_mismatched_state() {
        let first = encode_content_info(&sample_content(time::OffsetDateTime::UNIX_EPOCH))
            .expect("encode first");
        let mut second_content =
            sample_content(time::OffsetDateTime::UNIX_EPOCH + time::Duration::seconds(60));
        second_content.content.vaps.as_mut().expect("vaps").hash[0] ^= 0xFF;
        let second = encode_content_info(&second_content).expect("encode second");
        let comparison = compare_state_digests(&first, &second).expect("compare");
        assert!(!comparison.matches());
        assert_eq!(comparison.mismatched_state_names(), vec!["vaps"]);
    }
}

View File

@ -14,7 +14,7 @@ use crate::cir::static_pool::{
};
use crate::current_repo_index::CurrentRepoObject;
use crate::data_model::ta::TrustAnchor;
use crate::storage::{RepositoryViewState, RocksStore};
use crate::storage::RocksStore;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirExportTiming {
@ -25,9 +25,6 @@ pub struct CirExportTiming {
#[derive(Debug, thiserror::Error)]
pub enum CirExportError {
#[error("list repository_view entries failed: {0}")]
ListRepositoryView(String),
#[error("CIR TAL URI must be http(s), got: {0}")]
InvalidTalUri(String),
@ -37,6 +34,13 @@ pub enum CirExportError {
#[error("CIR model validation failed: {0}")]
Validate(String),
#[error("CIR consumed audit has conflicting hashes for {rsync_uri}: {first} vs {second}")]
ConflictingObjectHash {
rsync_uri: String,
first: String,
second: String,
},
#[error("encode CIR failed: {0}")]
Encode(#[from] CirEncodeError),
@ -70,34 +74,45 @@ pub struct CirTalBinding<'a> {
pub tal_uri: &'a str,
}
fn collect_cir_objects_from_current_repo(
current_repo_objects: &[CurrentRepoObject],
) -> BTreeMap<String, String> {
let mut objects = BTreeMap::new();
for entry in current_repo_objects {
objects.insert(
entry.rsync_uri.clone(),
entry.current_hash_hex.to_ascii_lowercase(),
);
}
objects
fn is_sha256_hex(value: &str) -> bool {
value.len() == 64 && value.as_bytes().iter().all(u8::is_ascii_hexdigit)
}
fn collect_cir_objects_from_repository_view(
store: &RocksStore,
fn insert_consumed_object_hash(
objects: &mut BTreeMap<String, String>,
rsync_uri: &str,
sha256_hex: &str,
) -> Result<(), CirExportError> {
if !rsync_uri.starts_with("rsync://") || !is_sha256_hex(sha256_hex) {
return Ok(());
}
let normalized = sha256_hex.to_ascii_lowercase();
if let Some(existing) = objects.get(rsync_uri) {
if existing != &normalized {
return Err(CirExportError::ConflictingObjectHash {
rsync_uri: rsync_uri.to_string(),
first: existing.clone(),
second: normalized,
});
}
return Ok(());
}
objects.insert(rsync_uri.to_string(), normalized);
Ok(())
}
fn collect_cir_objects_from_validation_audit(
publication_points: &[PublicationPointAudit],
) -> Result<BTreeMap<String, String>, CirExportError> {
let entries = store
.list_repository_view_entries_with_prefix("rsync://")
.map_err(|e| CirExportError::ListRepositoryView(e.to_string()))?;
let mut objects = BTreeMap::new();
for entry in entries {
if matches!(
entry.state,
RepositoryViewState::Present | RepositoryViewState::Replaced
) && let Some(hash) = entry.current_hash
{
objects.insert(entry.rsync_uri, hash.to_ascii_lowercase());
for pp in publication_points {
for obj in &pp.objects {
if !matches!(obj.result, AuditObjectResult::Ok | AuditObjectResult::Error) {
continue;
}
insert_consumed_object_hash(&mut objects, &obj.rsync_uri, &obj.sha256_hex)?;
}
}
Ok(objects)
@ -123,11 +138,11 @@ pub fn build_cir_from_run(
}
pub fn build_cir_from_run_multi(
store: &RocksStore,
_store: &RocksStore,
tal_bindings: &[CirTalBinding<'_>],
validation_time: time::OffsetDateTime,
publication_points: &[PublicationPointAudit],
current_repo_objects: Option<&[CurrentRepoObject]>,
_current_repo_objects: Option<&[CurrentRepoObject]>,
) -> Result<CanonicalInputRepresentation, CirExportError> {
for binding in tal_bindings {
if !(binding.tal_uri.starts_with("https://") || binding.tal_uri.starts_with("http://")) {
@ -135,31 +150,7 @@ pub fn build_cir_from_run_multi(
}
}
let mut objects = if let Some(current_repo_objects) = current_repo_objects {
collect_cir_objects_from_current_repo(current_repo_objects)
} else {
collect_cir_objects_from_repository_view(store)?
};
// CIR must describe the actual input world used by validation. When a
// publication point falls back to the latest validated current instance,
// repository_view may not contain the reused manifest/object set. Pull
// those object hashes from the audit so replay can reconstruct the same
// world state.
for pp in publication_points {
if pp.source != "vcir_current_instance" {
continue;
}
for obj in &pp.objects {
if obj.result != AuditObjectResult::Ok {
continue;
}
if !obj.rsync_uri.starts_with("rsync://") {
continue;
}
objects.insert(obj.rsync_uri.clone(), obj.sha256_hex.to_ascii_lowercase());
}
}
let mut objects = collect_cir_objects_from_validation_audit(publication_points)?;
let mut tals = Vec::with_capacity(tal_bindings.len());
for binding in tal_bindings {
@ -420,10 +411,9 @@ fn ta_sha256_hex(bytes: &[u8]) -> String {
mod tests {
use super::*;
use crate::cir::decode::decode_cir;
use crate::current_repo_index::CurrentRepoObject;
use crate::data_model::ta::TrustAnchor;
use crate::data_model::tal::Tal;
use crate::storage::{RawByHashEntry, RepositoryViewEntry, RepositoryViewState, RocksStore};
use crate::storage::{RawByHashEntry, RocksStore};
fn sample_time() -> time::OffsetDateTime {
time::OffsetDateTime::parse(
@ -465,25 +455,38 @@ mod tests {
hex::encode(Sha256::digest(bytes))
}
fn audit_entry(
uri: &str,
hash: &str,
kind: crate::audit::AuditObjectKind,
result: crate::audit::AuditObjectResult,
detail: Option<&str>,
) -> crate::audit::ObjectAuditEntry {
crate::audit::ObjectAuditEntry {
rsync_uri: uri.to_string(),
sha256_hex: hash.to_string(),
kind,
result,
detail: detail.map(ToString::to_string),
}
}
#[test]
fn build_cir_from_run_collects_repository_view_and_tal() {
fn build_cir_from_run_collects_consumed_audit_objects_and_tal() {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open(td.path()).unwrap();
let bytes = b"object-a".to_vec();
let hash = sha256_hex(&bytes);
let mut raw = RawByHashEntry::from_bytes(hash.clone(), bytes.clone());
raw.origin_uris
.push("rsync://example.test/repo/a.cer".into());
store.put_raw_by_hash_entry(&raw).unwrap();
store
.put_repository_view_entry(&RepositoryViewEntry {
rsync_uri: "rsync://example.test/repo/a.cer".to_string(),
current_hash: Some(hash),
repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
object_type: Some("cer".to_string()),
state: RepositoryViewState::Present,
})
.unwrap();
let publication_points = vec![PublicationPointAudit {
objects: vec![audit_entry(
"rsync://example.test/repo/a.cer",
&hash,
crate::audit::AuditObjectKind::Certificate,
crate::audit::AuditObjectResult::Ok,
None,
)],
..PublicationPointAudit::default()
}];
let ta = sample_trust_anchor();
let cir = build_cir_from_run(
@ -491,7 +494,7 @@ mod tests {
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
&publication_points,
)
.expect("build cir");
assert_eq!(cir.version, CIR_VERSION_V2);
@ -523,15 +526,16 @@ mod tests {
raw.origin_uris
.push("rsync://example.test/repo/b.roa".into());
store.put_raw_by_hash_entry(&raw).unwrap();
store
.put_repository_view_entry(&RepositoryViewEntry {
rsync_uri: "rsync://example.test/repo/b.roa".to_string(),
current_hash: Some(hash.clone()),
repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
object_type: Some("roa".to_string()),
state: RepositoryViewState::Present,
})
.unwrap();
let publication_points = vec![PublicationPointAudit {
objects: vec![audit_entry(
"rsync://example.test/repo/b.roa",
&hash,
crate::audit::AuditObjectKind::Roa,
crate::audit::AuditObjectResult::Ok,
None,
)],
..PublicationPointAudit::default()
}];
let ta = sample_trust_anchor();
let cir_path = out_dir.join("example.cir");
@ -540,7 +544,7 @@ mod tests {
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
&publication_points,
&cir_path,
sample_date(),
)
@ -568,15 +572,16 @@ mod tests {
raw.origin_uris
.push("rsync://example.test/repo/d.roa".into());
store.put_raw_by_hash_entry(&raw).unwrap();
store
.put_repository_view_entry(&RepositoryViewEntry {
rsync_uri: "rsync://example.test/repo/d.roa".to_string(),
current_hash: Some(hash.clone()),
repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
object_type: Some("roa".to_string()),
state: RepositoryViewState::Present,
})
.unwrap();
let publication_points = vec![PublicationPointAudit {
objects: vec![audit_entry(
"rsync://example.test/repo/d.roa",
&hash,
crate::audit::AuditObjectKind::Roa,
crate::audit::AuditObjectResult::Ok,
None,
)],
..PublicationPointAudit::default()
}];
let ta = sample_trust_anchor();
let cir_path = out_dir.join("example.cir");
@ -585,7 +590,7 @@ mod tests {
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
&publication_points,
&cir_path,
sample_date(),
)
@ -625,7 +630,7 @@ mod tests {
}
#[test]
fn build_cir_from_run_includes_vcir_current_instance_objects_from_audit() {
fn build_cir_from_run_includes_consumed_vcir_current_instance_objects_from_audit() {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open(td.path()).unwrap();
let ta = sample_trust_anchor();
@ -673,25 +678,27 @@ mod tests {
}
#[test]
fn build_cir_from_run_multi_uses_current_repo_objects_without_repository_view() {
fn build_cir_from_run_multi_ignores_current_repo_superfluous_objects() {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open(td.path()).unwrap();
let ta1 = sample_trust_anchor();
let ta2 = sample_arin_trust_anchor();
let current_repo_objects = vec![
CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/a.roa".to_string(),
let current_repo_objects = vec![crate::current_repo_index::CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/superfluous.roa".to_string(),
current_hash_hex: "11".repeat(32),
repository_source: "https://rrdp.example.test/notification.xml".to_string(),
object_type: Some("roa".to_string()),
},
CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/b.cer".to_string(),
current_hash_hex: "22".repeat(32),
repository_source: "https://rrdp.example.test/notification.xml".to_string(),
object_type: Some("cer".to_string()),
},
];
}];
let publication_points = vec![PublicationPointAudit {
objects: vec![audit_entry(
"rsync://example.test/repo/consumed.roa",
&"22".repeat(32),
crate::audit::AuditObjectKind::Roa,
crate::audit::AuditObjectResult::Ok,
None,
)],
..PublicationPointAudit::default()
}];
let cir = build_cir_from_run_multi(
&store,
@ -706,21 +713,22 @@ mod tests {
},
],
sample_time(),
&[],
&publication_points,
Some(&current_repo_objects),
)
.expect("build cir from current repo objects");
.expect("build cir from consumed audit objects");
assert_eq!(cir.tals.len(), 2);
assert!(
cir.objects
.iter()
.any(|item| item.rsync_uri == "rsync://example.test/repo/a.roa")
.any(|item| item.rsync_uri == "rsync://example.test/repo/consumed.roa")
);
assert!(
cir.objects
!cir.objects
.iter()
.any(|item| item.rsync_uri == "rsync://example.test/repo/b.cer")
.any(|item| item.rsync_uri == "rsync://example.test/repo/superfluous.roa"),
"current repo objects must not be included unless validation consumed them",
);
assert!(
cir.objects.iter().any(|item| {
@ -773,20 +781,6 @@ mod tests {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open(td.path()).unwrap();
let ta = sample_trust_anchor();
let current_repo_objects = vec![
CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/a.roa".to_string(),
current_hash_hex: "11".repeat(32),
repository_source: "https://rrdp.example.test/notification.xml".to_string(),
object_type: Some("roa".to_string()),
},
CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/b.asa".to_string(),
current_hash_hex: "22".repeat(32),
repository_source: "https://rrdp.example.test/notification.xml".to_string(),
object_type: Some("aspa".to_string()),
},
];
let publication_points = vec![PublicationPointAudit {
objects: vec![
crate::audit::ObjectAuditEntry {
@ -822,11 +816,11 @@ mod tests {
}],
sample_time(),
&publication_points,
Some(&current_repo_objects),
None,
)
.expect("build cir");
assert_eq!(cir.rejected_objects.len(), 1);
assert_eq!(cir.rejected_objects.len(), 2);
assert_eq!(
cir.rejected_objects[0].object_uri,
"rsync://example.test/repo/a.roa"
@ -835,6 +829,16 @@ mod tests {
cir.rejected_objects[0].reason.as_deref(),
Some("invalid roa")
);
assert_eq!(
cir.rejected_objects[1].object_uri,
"rsync://example.test/repo/c.roa"
);
assert!(
!cir.objects
.iter()
.any(|item| item.rsync_uri == "rsync://example.test/repo/b.asa"),
"skipped audit objects are not considered consumed input",
);
}
#[test]
@ -842,12 +846,6 @@ mod tests {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open(td.path()).unwrap();
let ta = sample_trust_anchor();
let current_repo_objects = vec![CurrentRepoObject {
rsync_uri: "rsync://example.test/repo/a.roa".to_string(),
current_hash_hex: "11".repeat(32),
repository_source: "https://rrdp.example.test/notification.xml".to_string(),
object_type: Some("roa".to_string()),
}];
let mk_pp = |detail: &str| PublicationPointAudit {
objects: vec![crate::audit::ObjectAuditEntry {
@ -868,7 +866,7 @@ mod tests {
}],
sample_time(),
&[mk_pp("reason-a")],
Some(&current_repo_objects),
None,
)
.expect("build cir a");
let cir_b = build_cir_from_run_multi(
@ -879,7 +877,7 @@ mod tests {
}],
sample_time(),
&[mk_pp("reason-b")],
Some(&current_repo_objects),
None,
)
.expect("build cir b");
@ -936,15 +934,16 @@ mod tests {
raw.origin_uris
.push("rsync://example.test/repo/z.roa".into());
store.put_raw_by_hash_entry(&raw).unwrap();
store
.put_repository_view_entry(&RepositoryViewEntry {
rsync_uri: "rsync://example.test/repo/z.roa".to_string(),
current_hash: Some(hash.clone()),
repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
object_type: Some("roa".to_string()),
state: RepositoryViewState::Present,
})
.unwrap();
let publication_points = vec![PublicationPointAudit {
objects: vec![audit_entry(
"rsync://example.test/repo/z.roa",
&hash,
crate::audit::AuditObjectKind::Roa,
crate::audit::AuditObjectResult::Ok,
None,
)],
..PublicationPointAudit::default()
}];
let cir = build_cir_from_run_multi(
&store,
@ -959,7 +958,7 @@ mod tests {
},
],
sample_time(),
&[],
&publication_points,
None,
)
.expect("build cir");

View File

@ -1001,14 +1001,6 @@ impl PostValidationShared {
.collect()
}
}
/// Borrow the collected current-repo objects, or `None` when the run
/// gathered none (callers treat an empty collection and "not collected"
/// the same way, so the empty case is normalized to `None`).
fn current_repo_objects(&self) -> Option<&[crate::current_repo_index::CurrentRepoObject]> {
    // Guard clause: an empty collection means nothing was collected.
    if self.current_repo_objects.is_empty() {
        return None;
    }
    Some(self.current_repo_objects.as_ref())
}
}
fn build_report(
@ -1521,7 +1513,7 @@ pub fn run(argv: &[String]) -> Result<(), String> {
let total_started = std::time::Instant::now();
let validation_started = std::time::Instant::now();
let collect_current_repo_objects = args.cir_enabled;
let collect_current_repo_objects = false;
let out = if delta_replay_mode {
let tal_path = args
.tal_path
@ -1832,7 +1824,7 @@ pub fn run(argv: &[String]) -> Result<(), String> {
shared.publication_points.as_ref(),
cir_out_path,
time::OffsetDateTime::now_utc().date(),
shared.current_repo_objects(),
None,
)
.map_err(|e| e.to_string())?;
cir_build_cir_ms = Some(summary.timing.build_cir_ms);