20260408_2 增加CIR sequence,未验证drop analysis,遇到问题是static pool保存太慢,拖慢整体录制,待解决

This commit is contained in:
yuyr 2026-04-09 16:08:11 +08:00
parent c9ef5aaf4c
commit e083fe4daa
26 changed files with 3233 additions and 15 deletions

View File

@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Fetch a recorded CIR sequence directory from a remote host into a local
# directory via rsync. All three arguments are required.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/fetch_cir_sequence_from_remote.sh \
--ssh-target <user@host> \
--remote-path <path> \
--local-path <path>
EOF
}

# Required arguments; no defaults.
SSH_TARGET=""
REMOTE_PATH=""
LOCAL_PATH=""

# Every flag takes exactly one value.
while (( $# > 0 )); do
  case "$1" in
    --ssh-target)
      SSH_TARGET="$2"
      shift 2
      ;;
    --remote-path)
      REMOTE_PATH="$2"
      shift 2
      ;;
    --local-path)
      LOCAL_PATH="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "unknown argument: $1" >&2
      usage
      exit 2
      ;;
  esac
done

# Reject the invocation unless all three values were provided.
if [[ -z "$SSH_TARGET" || -z "$REMOTE_PATH" || -z "$LOCAL_PATH" ]]; then
  usage >&2
  exit 2
fi

# Make sure the destination's parent exists, then mirror the remote tree.
# Trailing slashes copy the *contents* of REMOTE_PATH into LOCAL_PATH.
mkdir -p "$(dirname "$LOCAL_PATH")"
rsync -a "$SSH_TARGET:$REMOTE_PATH/" "$LOCAL_PATH/"
echo "done: $LOCAL_PATH"

View File

@ -0,0 +1,78 @@
#!/usr/bin/env bash
# Run the CIR drop-analysis binary over every step of a recorded sequence
# (described by <sequence-root>/sequence.json) and aggregate the per-step
# results into <sequence-root>/drop-summary.json.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_drop_sequence.sh \
--sequence-root <path> \
[--drop-bin <path>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEQUENCE_ROOT=""
# Drop-report binary; overridable via --drop-bin or the DROP_BIN env var.
DROP_BIN="${DROP_BIN:-$ROOT_DIR/target/release/cir_drop_report}"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --sequence-root) SEQUENCE_ROOT="$2"; shift 2 ;;
    --drop-bin) DROP_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done
[[ -n "$SEQUENCE_ROOT" ]] || { usage >&2; exit 2; }

# Build the drop-report binary on demand, mirroring the sibling record
# scripts. Previously a missing binary only surfaced as an opaque
# subprocess failure inside the Python loop below.
if [[ ! -x "$DROP_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin cir_drop_report
  )
fi

# Run the drop report once per step and collect per-step counts.
python3 - <<'PY' "$SEQUENCE_ROOT" "$DROP_BIN"
import json
import subprocess
import sys
from pathlib import Path

sequence_root = Path(sys.argv[1]).resolve()
drop_bin = sys.argv[2]
sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
# All steps share one static-object pool.
static_root = sequence_root / sequence["staticRoot"]
summaries = []
for step in sequence["steps"]:
    step_id = step["stepId"]
    out_dir = sequence_root / "drop" / step_id
    out_dir.mkdir(parents=True, exist_ok=True)
    cmd = [
        drop_bin,
        "--cir",
        str(sequence_root / step["cirPath"]),
        "--ccr",
        str(sequence_root / step["ccrPath"]),
        "--report-json",
        str(sequence_root / step["reportPath"]),
        "--static-root",
        str(static_root),
        "--json-out",
        str(out_dir / "drop.json"),
        "--md-out",
        str(out_dir / "drop.md"),
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        # Surface both streams: the binary logs diagnostics to stderr.
        raise SystemExit(
            f"drop report failed for {step_id}: stdout={proc.stdout} stderr={proc.stderr}"
        )
    result = json.loads((out_dir / "drop.json").read_text(encoding="utf-8"))
    summaries.append(
        {
            "stepId": step_id,
            "droppedVrpCount": result["summary"]["droppedVrpCount"],
            "droppedObjectCount": result["summary"]["droppedObjectCount"],
            "reportPath": str(out_dir / "drop.json"),
        }
    )
summary = {"version": 1, "steps": summaries}
(sequence_root / "drop-summary.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
PY
echo "done: $SEQUENCE_ROOT"

View File

@ -0,0 +1,173 @@
#!/usr/bin/env bash
# Record a CIR "full + one delta" pair offline: one full validation from the
# replay archive and one delta validation from the base+delta archives. Each
# run writes CCR/report/CIR outputs under --out-dir; static objects are
# shared in <out-dir>/static. Writes a summary.json manifest at the end.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_full_delta.sh \
--out-dir <path> \
--tal-path <path> \
--ta-path <path> \
--cir-tal-uri <url> \
--payload-replay-archive <path> \
--payload-replay-locks <path> \
--payload-base-archive <path> \
--payload-base-locks <path> \
--payload-delta-archive <path> \
--payload-delta-locks <path> \
[--base-validation-time <rfc3339>] \
[--delta-validation-time <rfc3339>] \
[--max-depth <n>] \
[--max-instances <n>] \
[--rpki-bin <path>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
OUT_DIR=""
TAL_PATH=""
TA_PATH=""
CIR_TAL_URI=""
PAYLOAD_REPLAY_ARCHIVE=""
PAYLOAD_REPLAY_LOCKS=""
PAYLOAD_BASE_ARCHIVE=""
PAYLOAD_BASE_LOCKS=""
PAYLOAD_DELTA_ARCHIVE=""
PAYLOAD_DELTA_LOCKS=""
BASE_VALIDATION_TIME=""
DELTA_VALIDATION_TIME=""
MAX_DEPTH=0
MAX_INSTANCES=1
# Validator binary; overridable via --rpki-bin or the RPKI_BIN env var.
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --out-dir) OUT_DIR="$2"; shift 2 ;;
    --tal-path) TAL_PATH="$2"; shift 2 ;;
    --ta-path) TA_PATH="$2"; shift 2 ;;
    --cir-tal-uri) CIR_TAL_URI="$2"; shift 2 ;;
    --payload-replay-archive) PAYLOAD_REPLAY_ARCHIVE="$2"; shift 2 ;;
    --payload-replay-locks) PAYLOAD_REPLAY_LOCKS="$2"; shift 2 ;;
    --payload-base-archive) PAYLOAD_BASE_ARCHIVE="$2"; shift 2 ;;
    --payload-base-locks) PAYLOAD_BASE_LOCKS="$2"; shift 2 ;;
    --payload-delta-archive) PAYLOAD_DELTA_ARCHIVE="$2"; shift 2 ;;
    --payload-delta-locks) PAYLOAD_DELTA_LOCKS="$2"; shift 2 ;;
    --base-validation-time) BASE_VALIDATION_TIME="$2"; shift 2 ;;
    --delta-validation-time) DELTA_VALIDATION_TIME="$2"; shift 2 ;;
    --max-depth) MAX_DEPTH="$2"; shift 2 ;;
    --max-instances) MAX_INSTANCES="$2"; shift 2 ;;
    --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

# Every path/URI argument is mandatory.
[[ -n "$OUT_DIR" && -n "$TAL_PATH" && -n "$TA_PATH" && -n "$CIR_TAL_URI" && -n "$PAYLOAD_REPLAY_ARCHIVE" && -n "$PAYLOAD_REPLAY_LOCKS" && -n "$PAYLOAD_BASE_ARCHIVE" && -n "$PAYLOAD_BASE_LOCKS" && -n "$PAYLOAD_DELTA_ARCHIVE" && -n "$PAYLOAD_DELTA_LOCKS" ]] || {
  usage >&2
  exit 2
}

# Build the validator on demand.
if [[ ! -x "$RPKI_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin rpki
  )
fi

# Read the validationTime field out of a locks JSON file.
resolve_validation_time() {
  local path="$1"
  python3 - <<'PY' "$path"
import json, sys
print(json.load(open(sys.argv[1], 'r', encoding='utf-8'))['validationTime'])
PY
}

# Validation times default to the ones recorded in the locks files.
if [[ -z "$BASE_VALIDATION_TIME" ]]; then
  BASE_VALIDATION_TIME="$(resolve_validation_time "$PAYLOAD_REPLAY_LOCKS")"
fi
if [[ -z "$DELTA_VALIDATION_TIME" ]]; then
  DELTA_VALIDATION_TIME="$(resolve_validation_time "$PAYLOAD_DELTA_LOCKS")"
fi

# Start from a clean slate; any previous recording under OUT_DIR is removed.
rm -rf "$OUT_DIR"
mkdir -p "$OUT_DIR/full" "$OUT_DIR/delta-001" "$OUT_DIR/static"
FULL_DB="$OUT_DIR/full/db"
DELTA_DB="$OUT_DIR/delta-001/db"

# Full step: validate the replay archive at the base validation time.
"$RPKI_BIN" \
--db "$FULL_DB" \
--tal-path "$TAL_PATH" \
--ta-path "$TA_PATH" \
--payload-replay-archive "$PAYLOAD_REPLAY_ARCHIVE" \
--payload-replay-locks "$PAYLOAD_REPLAY_LOCKS" \
--validation-time "$BASE_VALIDATION_TIME" \
--max-depth "$MAX_DEPTH" \
--max-instances "$MAX_INSTANCES" \
--ccr-out "$OUT_DIR/full/result.ccr" \
--report-json "$OUT_DIR/full/report.json" \
--cir-enable \
--cir-out "$OUT_DIR/full/input.cir" \
--cir-static-root "$OUT_DIR/static" \
--cir-tal-uri "$CIR_TAL_URI" \
>"$OUT_DIR/full/run.stdout.log" 2>"$OUT_DIR/full/run.stderr.log"

# Delta step: validate base+delta at the delta validation time, sharing the
# same static-object root so unchanged objects are deduplicated.
"$RPKI_BIN" \
--db "$DELTA_DB" \
--tal-path "$TAL_PATH" \
--ta-path "$TA_PATH" \
--payload-base-archive "$PAYLOAD_BASE_ARCHIVE" \
--payload-base-locks "$PAYLOAD_BASE_LOCKS" \
--payload-delta-archive "$PAYLOAD_DELTA_ARCHIVE" \
--payload-delta-locks "$PAYLOAD_DELTA_LOCKS" \
--payload-base-validation-time "$BASE_VALIDATION_TIME" \
--validation-time "$DELTA_VALIDATION_TIME" \
--max-depth "$MAX_DEPTH" \
--max-instances "$MAX_INSTANCES" \
--ccr-out "$OUT_DIR/delta-001/result.ccr" \
--report-json "$OUT_DIR/delta-001/report.json" \
--cir-enable \
--cir-out "$OUT_DIR/delta-001/input.cir" \
--cir-static-root "$OUT_DIR/static" \
--cir-tal-uri "$CIR_TAL_URI" \
>"$OUT_DIR/delta-001/run.stdout.log" 2>"$OUT_DIR/delta-001/run.stderr.log"

# Write the summary.json manifest describing both steps.
# (Fixed: dropped the unused `import os`; named the rglob variable instead
# of binding and then referencing `_`.)
python3 - <<'PY' "$OUT_DIR" "$BASE_VALIDATION_TIME" "$DELTA_VALIDATION_TIME"
import json
import sys
from pathlib import Path

out = Path(sys.argv[1])
base_validation_time = sys.argv[2]
delta_validation_time = sys.argv[3]
static_files = sum(1 for p in (out / "static").rglob("*") if p.is_file())
summary = {
    "version": 1,
    "kind": "cir_pair",
    "baseValidationTime": base_validation_time,
    "deltaValidationTime": delta_validation_time,
    "staticRoot": "static",
    "steps": [
        {
            "kind": "full",
            "cirPath": "full/input.cir",
            "ccrPath": "full/result.ccr",
            "reportPath": "full/report.json",
        },
        {
            "kind": "delta",
            "cirPath": "delta-001/input.cir",
            "ccrPath": "delta-001/result.ccr",
            "reportPath": "delta-001/report.json",
            "previous": "full",
        },
    ],
    "staticFileCount": static_files,
}
(out / "summary.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
PY
echo "done: $OUT_DIR"

View File

@ -0,0 +1,129 @@
#!/usr/bin/env bash
# Drive run_cir_record_sequence_offline.sh once per RIR using archives from a
# multi-RIR payload-replay bundle, then write cross-RIR summary.json/.md.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_multi_rir_offline.sh \
[--bundle-root <path>] \
[--rir <afrinic,apnic,arin,lacnic,ripe>] \
[--delta-count <n>] \
[--full-repo] \
[--out-root <path>] \
[--rpki-bin <path>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
CASE_INFO="$ROOT_DIR/scripts/payload_replay/multi_rir_case_info.py"
SINGLE_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_record_sequence_offline.sh"
# NOTE(review): site-specific default bundle location; override with
# --bundle-root on other machines.
BUNDLE_ROOT="/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3"
RIRS="afrinic,apnic,arin,lacnic,ripe"
DELTA_COUNT=2
FULL_REPO=0
OUT_ROOT="$ROOT_DIR/target/replay/cir_sequence_multi_rir_offline_$(date -u +%Y%m%dT%H%M%SZ)"
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --bundle-root) BUNDLE_ROOT="$2"; shift 2 ;;
    --rir) RIRS="$2"; shift 2 ;;
    --delta-count) DELTA_COUNT="$2"; shift 2 ;;
    --full-repo) FULL_REPO=1; shift 1 ;;
    --out-root) OUT_ROOT="$2"; shift 2 ;;
    --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

mkdir -p "$OUT_ROOT"
SUMMARY_JSON="$OUT_ROOT/summary.json"
SUMMARY_MD="$OUT_ROOT/summary.md"

# json_field <json-document> <key> — print one top-level field.
# Replaces six copy-pasted Python heredocs that each extracted one key.
json_field() {
  python3 - "$1" "$2" <<'PY'
import json, sys
print(json.loads(sys.argv[1])[sys.argv[2]])
PY
}

IFS=',' read -r -a RIR_ITEMS <<< "$RIRS"
for rir in "${RIR_ITEMS[@]}"; do
  # case_info resolves per-RIR TAL/TA/archive/locks paths for the bundle.
  CASE_JSON="$(python3 "$CASE_INFO" --bundle-root "$BUNDLE_ROOT" --repo-root "$ROOT_DIR" --rir "$rir")"
  TAL_PATH="$(json_field "$CASE_JSON" tal_path)"
  TA_PATH="$(json_field "$CASE_JSON" ta_path)"
  BASE_ARCHIVE="$(json_field "$CASE_JSON" base_archive)"
  BASE_LOCKS="$(json_field "$CASE_JSON" base_locks)"
  DELTA_ARCHIVE="$(json_field "$CASE_JSON" delta_archive)"
  DELTA_LOCKS="$(json_field "$CASE_JSON" delta_locks)"
  OUT_DIR="$OUT_ROOT/$rir"
  args=(
    "$SINGLE_SCRIPT"
    --out-dir "$OUT_DIR"
    --tal-path "$TAL_PATH"
    --ta-path "$TA_PATH"
    --cir-tal-uri "https://example.test/$rir.tal"
    --payload-replay-archive "$BASE_ARCHIVE"
    --payload-replay-locks "$BASE_LOCKS"
    --payload-base-archive "$BASE_ARCHIVE"
    --payload-base-locks "$BASE_LOCKS"
    --payload-delta-archive "$DELTA_ARCHIVE"
    --payload-delta-locks "$DELTA_LOCKS"
    --delta-count "$DELTA_COUNT"
    --rpki-bin "$RPKI_BIN"
  )
  # Default to a depth-limited run unless --full-repo was requested.
  if [[ "$FULL_REPO" -ne 1 ]]; then
    args+=(--max-depth 0 --max-instances 1)
  else
    args+=(--full-repo)
  fi
  "${args[@]}"
done

# Aggregate the per-RIR sequence/summary files into one JSON + Markdown view.
python3 - <<'PY' "$OUT_ROOT" "$RIRS" "$SUMMARY_JSON" "$SUMMARY_MD"
import json, sys
from pathlib import Path

out_root = Path(sys.argv[1])
rirs = [item for item in sys.argv[2].split(',') if item]
summary_json = Path(sys.argv[3])
summary_md = Path(sys.argv[4])
items = []
for rir in rirs:
    root = out_root / rir
    seq = json.loads((root / "sequence.json").read_text(encoding="utf-8"))
    summ = json.loads((root / "summary.json").read_text(encoding="utf-8"))
    items.append({
        "rir": rir,
        "root": str(root),
        "stepCount": len(seq["steps"]),
        "staticFileCount": summ["staticFileCount"],
    })
summary = {"version": 1, "rirs": items}
summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")
lines = ["# Multi-RIR Offline CIR Sequence Summary", ""]
for item in items:
    lines.append(f"- `{item['rir']}`: `stepCount={item['stepCount']}` `staticFileCount={item['staticFileCount']}` `root={item['root']}`")
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY
echo "done: $OUT_ROOT"

View File

@ -0,0 +1,206 @@
#!/usr/bin/env bash
# Record an offline CIR sequence: one "full" step validated from the replay
# archive, then --delta-count "delta" steps validated from the base+delta
# archives. Each step records a CIR (input.cir), a CCR (result.ccr) and a
# JSON report; static objects are shared in <out-dir>/static. Finally writes
# sequence.json (step manifest) and summary.json.
# NOTE(review): every delta step replays the same base+delta archive pair,
# so the delta steps are expected to be identical — confirm this is the
# intended fixture shape.
set -euo pipefail
usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_offline.sh \
--out-dir <path> \
--tal-path <path> \
--ta-path <path> \
--cir-tal-uri <url> \
--payload-replay-archive <path> \
--payload-replay-locks <path> \
--payload-base-archive <path> \
--payload-base-locks <path> \
--payload-delta-archive <path> \
--payload-delta-locks <path> \
[--delta-count <n>] \
[--base-validation-time <rfc3339>] \
[--delta-validation-time <rfc3339>] \
[--full-repo] \
[--max-depth <n>] \
[--max-instances <n>] \
[--rpki-bin <path>]
EOF
}
# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
OUT_DIR=""
TAL_PATH=""
TA_PATH=""
CIR_TAL_URI=""
PAYLOAD_REPLAY_ARCHIVE=""
PAYLOAD_REPLAY_LOCKS=""
PAYLOAD_BASE_ARCHIVE=""
PAYLOAD_BASE_LOCKS=""
PAYLOAD_DELTA_ARCHIVE=""
PAYLOAD_DELTA_LOCKS=""
BASE_VALIDATION_TIME=""
DELTA_VALIDATION_TIME=""
# Number of delta steps recorded after the full step.
DELTA_COUNT=2
# 0 = constrain traversal with --max-depth/--max-instances; 1 = full repo.
FULL_REPO=0
MAX_DEPTH=0
MAX_INSTANCES=1
# Validator binary; overridable via --rpki-bin or the RPKI_BIN env var.
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"
while [[ $# -gt 0 ]]; do
case "$1" in
--out-dir) OUT_DIR="$2"; shift 2 ;;
--tal-path) TAL_PATH="$2"; shift 2 ;;
--ta-path) TA_PATH="$2"; shift 2 ;;
--cir-tal-uri) CIR_TAL_URI="$2"; shift 2 ;;
--payload-replay-archive) PAYLOAD_REPLAY_ARCHIVE="$2"; shift 2 ;;
--payload-replay-locks) PAYLOAD_REPLAY_LOCKS="$2"; shift 2 ;;
--payload-base-archive) PAYLOAD_BASE_ARCHIVE="$2"; shift 2 ;;
--payload-base-locks) PAYLOAD_BASE_LOCKS="$2"; shift 2 ;;
--payload-delta-archive) PAYLOAD_DELTA_ARCHIVE="$2"; shift 2 ;;
--payload-delta-locks) PAYLOAD_DELTA_LOCKS="$2"; shift 2 ;;
--base-validation-time) BASE_VALIDATION_TIME="$2"; shift 2 ;;
--delta-validation-time) DELTA_VALIDATION_TIME="$2"; shift 2 ;;
--delta-count) DELTA_COUNT="$2"; shift 2 ;;
--full-repo) FULL_REPO=1; shift 1 ;;
--max-depth) MAX_DEPTH="$2"; shift 2 ;;
--max-instances) MAX_INSTANCES="$2"; shift 2 ;;
--rpki-bin) RPKI_BIN="$2"; shift 2 ;;
-h|--help) usage; exit 0 ;;
*) echo "unknown argument: $1" >&2; usage; exit 2 ;;
esac
done
# All TAL/TA/URI/archive/locks arguments are mandatory.
[[ -n "$OUT_DIR" && -n "$TAL_PATH" && -n "$TA_PATH" && -n "$CIR_TAL_URI" && -n "$PAYLOAD_REPLAY_ARCHIVE" && -n "$PAYLOAD_REPLAY_LOCKS" && -n "$PAYLOAD_BASE_ARCHIVE" && -n "$PAYLOAD_BASE_LOCKS" && -n "$PAYLOAD_DELTA_ARCHIVE" && -n "$PAYLOAD_DELTA_LOCKS" ]] || {
usage >&2
exit 2
}
# Build the validator on demand.
if [[ ! -x "$RPKI_BIN" ]]; then
(
cd "$ROOT_DIR"
cargo build --release --bin rpki
)
fi
# Read the validationTime field from a locks JSON file.
resolve_validation_time() {
local path="$1"
python3 - <<'PY' "$path"
import json, sys
print(json.load(open(sys.argv[1], 'r', encoding='utf-8'))['validationTime'])
PY
}
# Validation times default to those recorded in the locks files.
if [[ -z "$BASE_VALIDATION_TIME" ]]; then
BASE_VALIDATION_TIME="$(resolve_validation_time "$PAYLOAD_REPLAY_LOCKS")"
fi
if [[ -z "$DELTA_VALIDATION_TIME" ]]; then
DELTA_VALIDATION_TIME="$(resolve_validation_time "$PAYLOAD_DELTA_LOCKS")"
fi
# Start clean: any previous recording under OUT_DIR is deleted.
rm -rf "$OUT_DIR"
mkdir -p "$OUT_DIR/static" "$OUT_DIR/full"
# run_step KIND STEP_DIR DB_DIR [extra rpki args...]
# Runs one validator pass writing CCR/report/CIR into STEP_DIR; stdout and
# stderr are captured into run.{stdout,stderr}.log. KIND is informational
# only (it is not forwarded to the validator).
run_step() {
local kind="$1"
local step_dir="$2"
local db_dir="$3"
shift 3
mkdir -p "$step_dir"
local -a cmd=(
"$RPKI_BIN"
--db "$db_dir" \
--tal-path "$TAL_PATH" \
--ta-path "$TA_PATH" \
--ccr-out "$step_dir/result.ccr" \
--report-json "$step_dir/report.json" \
--cir-enable \
--cir-out "$step_dir/input.cir" \
--cir-static-root "$OUT_DIR/static" \
--cir-tal-uri "$CIR_TAL_URI"
)
if [[ "$FULL_REPO" -ne 1 ]]; then
cmd+=(--max-depth "$MAX_DEPTH" --max-instances "$MAX_INSTANCES")
fi
cmd+=("$@")
"${cmd[@]}" >"$step_dir/run.stdout.log" 2>"$step_dir/run.stderr.log"
}
# Full step: replay archive at the base validation time.
run_step \
full \
"$OUT_DIR/full" \
"$OUT_DIR/full/db" \
--payload-replay-archive "$PAYLOAD_REPLAY_ARCHIVE" \
--payload-replay-locks "$PAYLOAD_REPLAY_LOCKS" \
--validation-time "$BASE_VALIDATION_TIME"
# Delta steps: each one replays the same base+delta archive pair with its
# own database directory, sharing the CIR static root with the full step.
for idx in $(seq 1 "$DELTA_COUNT"); do
step_id="$(printf 'delta-%03d' "$idx")"
run_step \
delta \
"$OUT_DIR/$step_id" \
"$OUT_DIR/$step_id/db" \
--payload-base-archive "$PAYLOAD_BASE_ARCHIVE" \
--payload-base-locks "$PAYLOAD_BASE_LOCKS" \
--payload-delta-archive "$PAYLOAD_DELTA_ARCHIVE" \
--payload-delta-locks "$PAYLOAD_DELTA_LOCKS" \
--payload-base-validation-time "$BASE_VALIDATION_TIME" \
--validation-time "$DELTA_VALIDATION_TIME"
done
# Write sequence.json (ordered step manifest with previousStepId links)
# and summary.json (step count + static pool size).
python3 - <<'PY' "$OUT_DIR" "$BASE_VALIDATION_TIME" "$DELTA_VALIDATION_TIME" "$DELTA_COUNT"
import json
import sys
from pathlib import Path

out = Path(sys.argv[1])
base_validation_time = sys.argv[2]
delta_validation_time = sys.argv[3]
delta_count = int(sys.argv[4])
steps = [
    {
        "stepId": "full",
        "kind": "full",
        "validationTime": base_validation_time,
        "cirPath": "full/input.cir",
        "ccrPath": "full/result.ccr",
        "reportPath": "full/report.json",
        "previousStepId": None,
    }
]
previous = "full"
for idx in range(1, delta_count + 1):
    step_id = f"delta-{idx:03d}"
    steps.append(
        {
            "stepId": step_id,
            "kind": "delta",
            "validationTime": delta_validation_time,
            "cirPath": f"{step_id}/input.cir",
            "ccrPath": f"{step_id}/result.ccr",
            "reportPath": f"{step_id}/report.json",
            "previousStepId": previous,
        }
    )
    previous = step_id
summary = {
    "version": 1,
    "kind": "cir_sequence_offline",
    "staticRoot": "static",
    "steps": steps,
}
(out / "sequence.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
(out / "summary.json").write_text(
    json.dumps(
        {
            "version": 1,
            "stepCount": len(steps),
            "staticFileCount": sum(1 for p in (out / "static").rglob("*") if p.is_file()),
        },
        indent=2,
    ),
    encoding="utf-8",
)
PY
echo "done: $OUT_DIR"

View File

@ -0,0 +1,239 @@
#!/usr/bin/env bash
# Record a live CIR sequence on a remote host: rsync the repo + prebuilt
# rpki binary over, then (remotely) run one full validation and
# --delta-count delta validations against the same database, with per-step
# timing captured. Produces sequence.json and summary.json on the remote
# side; the remote output directory path is echoed at the end.
set -euo pipefail
usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_remote.sh \
--rir <name> \
--remote-root <path> \
[--ssh-target <user@host>] \
[--out-subdir <path>] \
[--delta-count <n>] \
[--sleep-secs <n>] \
[--full-repo] \
[--max-depth <n>] \
[--max-instances <n>]
EOF
}
# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# NOTE(review): hardcoded default bench host; override with --ssh-target or
# the SSH_TARGET env var.
SSH_TARGET="${SSH_TARGET:-root@47.77.183.68}"
RIR=""
REMOTE_ROOT=""
OUT_SUBDIR=""
DELTA_COUNT=2
# Pause between delta runs so the remote repository state can change.
SLEEP_SECS=30
FULL_REPO=0
MAX_DEPTH=0
MAX_INSTANCES=1
while [[ $# -gt 0 ]]; do
case "$1" in
--rir) RIR="$2"; shift 2 ;;
--remote-root) REMOTE_ROOT="$2"; shift 2 ;;
--ssh-target) SSH_TARGET="$2"; shift 2 ;;
--out-subdir) OUT_SUBDIR="$2"; shift 2 ;;
--delta-count) DELTA_COUNT="$2"; shift 2 ;;
--sleep-secs) SLEEP_SECS="$2"; shift 2 ;;
--full-repo) FULL_REPO=1; shift 1 ;;
--max-depth) MAX_DEPTH="$2"; shift 2 ;;
--max-instances) MAX_INSTANCES="$2"; shift 2 ;;
-h|--help) usage; exit 0 ;;
*) echo "unknown argument: $1" >&2; usage; exit 2 ;;
esac
done
[[ -n "$RIR" && -n "$REMOTE_ROOT" ]] || { usage >&2; exit 2; }
# Map the RIR name to in-repo TAL/TA fixture paths (relative to REMOTE_ROOT).
case "$RIR" in
afrinic) TAL_REL="tests/fixtures/tal/afrinic.tal"; TA_REL="tests/fixtures/ta/afrinic-ta.cer" ;;
apnic) TAL_REL="tests/fixtures/tal/apnic-rfc7730-https.tal"; TA_REL="tests/fixtures/ta/apnic-ta.cer" ;;
arin) TAL_REL="tests/fixtures/tal/arin.tal"; TA_REL="tests/fixtures/ta/arin-ta.cer" ;;
lacnic) TAL_REL="tests/fixtures/tal/lacnic.tal"; TA_REL="tests/fixtures/ta/lacnic-ta.cer" ;;
ripe) TAL_REL="tests/fixtures/tal/ripe-ncc.tal"; TA_REL="tests/fixtures/ta/ripe-ncc-ta.cer" ;;
*) echo "unsupported rir: $RIR" >&2; exit 2 ;;
esac
# Mirror the working tree (minus build artifacts and VCS data) to the
# remote host, then push the locally built release binary.
rsync -a --delete \
--exclude target \
--exclude .git \
"$ROOT_DIR/" "$SSH_TARGET:$REMOTE_ROOT/"
ssh "$SSH_TARGET" "mkdir -p '$REMOTE_ROOT/target/release'"
rsync -a "$ROOT_DIR/target/release/rpki" "$SSH_TARGET:$REMOTE_ROOT/target/release/"
# Execute the recording remotely. Parameters travel as environment
# variables; the quoted EOS heredoc keeps the remote script literal.
ssh "$SSH_TARGET" \
RIR="$RIR" \
REMOTE_ROOT="$REMOTE_ROOT" \
OUT_SUBDIR="$OUT_SUBDIR" \
DELTA_COUNT="$DELTA_COUNT" \
SLEEP_SECS="$SLEEP_SECS" \
FULL_REPO="$FULL_REPO" \
MAX_DEPTH="$MAX_DEPTH" \
MAX_INSTANCES="$MAX_INSTANCES" \
TAL_REL="$TAL_REL" \
TA_REL="$TA_REL" \
'bash -s' <<'EOS'
set -euo pipefail
cd "$REMOTE_ROOT"
# Output directory: explicit --out-subdir or a timestamped default.
if [[ -n "${OUT_SUBDIR}" ]]; then
OUT="${OUT_SUBDIR}"
else
OUT="target/replay/cir_sequence_remote_${RIR}_$(date -u +%Y%m%dT%H%M%SZ)"
fi
mkdir -p "$OUT/full" "$OUT/static"
# One shared database so delta runs see the state left by earlier runs.
DB="$OUT/work-db"
# write_step_timing PATH START_MS END_MS STARTED_AT FINISHED_AT
# Persists wall-clock timing for one step as timing.json.
write_step_timing() {
local path="$1"
local start_ms="$2"
local end_ms="$3"
local started_at="$4"
local finished_at="$5"
python3 - <<'PY' "$path" "$start_ms" "$end_ms" "$started_at" "$finished_at"
import json, sys

path, start_ms, end_ms, started_at, finished_at = sys.argv[1:]
start_ms = int(start_ms)
end_ms = int(end_ms)
with open(path, "w", encoding="utf-8") as fh:
    json.dump(
        {
            "durationMs": end_ms - start_ms,
            "startedAt": started_at,
            "finishedAt": finished_at,
        },
        fh,
        indent=2,
    )
PY
}
# run_step STEP_DIR [rpki args...]
# Runs the validator with millisecond timing; stdout/stderr are captured
# into run.{stdout,stderr}.log and timing goes to timing.json.
run_step() {
local step_dir="$1"
shift
mkdir -p "$step_dir"
local start_ms end_ms started_at finished_at
start_ms="$(python3 - <<'PY'
import time
print(int(time.time() * 1000))
PY
)"
started_at="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
target/release/rpki "$@" >"$step_dir/run.stdout.log" 2>"$step_dir/run.stderr.log"
end_ms="$(python3 - <<'PY'
import time
print(int(time.time() * 1000))
PY
)"
finished_at="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
write_step_timing "$step_dir/timing.json" "$start_ms" "$end_ms" "$started_at" "$finished_at"
}
# Full step arguments; deltas below use the same shape against the same DB.
full_args=(
--db "$DB"
--tal-path "$TAL_REL"
--ta-path "$TA_REL"
--ccr-out "$OUT/full/result.ccr"
--report-json "$OUT/full/report.json"
--cir-enable
--cir-out "$OUT/full/input.cir"
--cir-static-root "$OUT/static"
--cir-tal-uri "https://example.test/${RIR}.tal"
)
if [[ "$FULL_REPO" -ne 1 ]]; then
full_args+=(--max-depth "$MAX_DEPTH" --max-instances "$MAX_INSTANCES")
fi
run_step "$OUT/full" "${full_args[@]}"
# Delta steps: wait SLEEP_SECS between runs so live repository contents can
# drift, then revalidate into a fresh step directory.
for idx in $(seq 1 "$DELTA_COUNT"); do
sleep "$SLEEP_SECS"
step="$(printf 'delta-%03d' "$idx")"
step_args=(
--db "$DB"
--tal-path "$TAL_REL"
--ta-path "$TA_REL"
--ccr-out "$OUT/$step/result.ccr"
--report-json "$OUT/$step/report.json"
--cir-enable
--cir-out "$OUT/$step/input.cir"
--cir-static-root "$OUT/static"
--cir-tal-uri "https://example.test/${RIR}.tal"
)
if [[ "$FULL_REPO" -ne 1 ]]; then
step_args+=(--max-depth "$MAX_DEPTH" --max-instances "$MAX_INSTANCES")
fi
run_step "$OUT/$step" "${step_args[@]}"
done
# Build sequence.json (step manifest; validation times are read back from
# each step's report.json) and summary.json (per-step timing rollup).
python3 - <<'PY' "$OUT" "$DELTA_COUNT" "$RIR"
import json, sys
from pathlib import Path

out = Path(sys.argv[1])
delta_count = int(sys.argv[2])
rir = sys.argv[3]

def read_validation_time(step_dir: Path) -> str:
    report = json.loads((step_dir / "report.json").read_text(encoding="utf-8"))
    return report["meta"]["validation_time_rfc3339_utc"]

def read_timing(step_dir: Path) -> dict:
    return json.loads((step_dir / "timing.json").read_text(encoding="utf-8"))

steps = []
steps.append(
    {
        "stepId": "full",
        "kind": "full",
        "validationTime": read_validation_time(out / "full"),
        "cirPath": "full/input.cir",
        "ccrPath": "full/result.ccr",
        "reportPath": "full/report.json",
        "timingPath": "full/timing.json",
        "previousStepId": None,
    }
)
prev = "full"
for i in range(1, delta_count + 1):
    step = f"delta-{i:03d}"
    steps.append(
        {
            "stepId": step,
            "kind": "delta",
            "validationTime": read_validation_time(out / step),
            "cirPath": f"{step}/input.cir",
            "ccrPath": f"{step}/result.ccr",
            "reportPath": f"{step}/report.json",
            "timingPath": f"{step}/timing.json",
            "previousStepId": prev,
        }
    )
    prev = step
(out / "sequence.json").write_text(
    json.dumps({"version": 1, "staticRoot": "static", "steps": steps}, indent=2),
    encoding="utf-8",
)
summary = {
    "version": 1,
    "rir": rir,
    "stepCount": len(steps),
    "steps": [
        {
            "stepId": step["stepId"],
            "kind": step["kind"],
            "validationTime": step["validationTime"],
            **read_timing(out / step["stepId"]),
        }
        for step in steps
    ],
}
(out / "summary.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
PY
# The only stdout of the remote script: the output directory path.
echo "$OUT"
EOS

View File

@ -0,0 +1,72 @@
#!/usr/bin/env bash
# Drive run_cir_record_sequence_remote.sh once per RIR, collecting each RIR's
# recording under a shared remote output subdirectory, and echo that root.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_remote_multi_rir.sh \
--remote-root <path> \
[--rir <afrinic,apnic,arin,lacnic,ripe>] \
[--ssh-target <user@host>] \
[--out-subdir-root <path>] \
[--delta-count <n>] \
[--sleep-secs <n>] \
[--full-repo] \
[--max-depth <n>] \
[--max-instances <n>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# NOTE(review): hardcoded default bench host; override with --ssh-target or
# the SSH_TARGET env var.
SSH_TARGET="${SSH_TARGET:-root@47.77.183.68}"
REMOTE_ROOT=""
RIRS="afrinic,apnic,arin,lacnic,ripe"
OUT_SUBDIR_ROOT=""
DELTA_COUNT=2
SLEEP_SECS=30
FULL_REPO=0
MAX_DEPTH=0
MAX_INSTANCES=1
SINGLE="$ROOT_DIR/scripts/cir/run_cir_record_sequence_remote.sh"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --remote-root) REMOTE_ROOT="$2"; shift 2 ;;
    --rir) RIRS="$2"; shift 2 ;;
    --ssh-target) SSH_TARGET="$2"; shift 2 ;;
    --out-subdir-root) OUT_SUBDIR_ROOT="$2"; shift 2 ;;
    --delta-count) DELTA_COUNT="$2"; shift 2 ;;
    --sleep-secs) SLEEP_SECS="$2"; shift 2 ;;
    --full-repo) FULL_REPO=1; shift 1 ;;
    --max-depth) MAX_DEPTH="$2"; shift 2 ;;
    --max-instances) MAX_INSTANCES="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done
[[ -n "$REMOTE_ROOT" ]] || { usage >&2; exit 2; }

# Default shared output root on the remote side: one timestamped directory
# with a per-RIR subdirectory.
if [[ -z "$OUT_SUBDIR_ROOT" ]]; then
  OUT_SUBDIR_ROOT="target/replay/cir_sequence_remote_multi_rir_$(date -u +%Y%m%dT%H%M%SZ)"
fi

IFS=',' read -r -a ITEMS <<< "$RIRS"
for rir in "${ITEMS[@]}"; do
  # Array elements need no backslash continuations; the original had a stray
  # trailing "\" right before ")" which breaks if whitespace follows it.
  args=(
    "$SINGLE"
    --rir "$rir"
    --remote-root "$REMOTE_ROOT"
    --ssh-target "$SSH_TARGET"
    --out-subdir "$OUT_SUBDIR_ROOT/$rir"
    --delta-count "$DELTA_COUNT"
    --sleep-secs "$SLEEP_SECS"
  )
  if [[ "$FULL_REPO" -eq 1 ]]; then
    args+=(--full-repo)
  else
    args+=(--max-depth "$MAX_DEPTH" --max-instances "$MAX_INSTANCES")
  fi
  "${args[@]}"
done
echo "$OUT_SUBDIR_ROOT"

View File

@ -0,0 +1,118 @@
#!/usr/bin/env bash
# Build a TA-only CIR sequence fixture per RIR: generate a minimal CIR with
# cir_ta_only_fixture, materialize it into an rsync mirror, replay it through
# the validator via the rsync wrapper, then duplicate the full step as
# --delta-count identical delta steps. Writes per-RIR sequence.json/summary
# and a cross-RIR summary.json under --out-root.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_ta_only_multi_rir.sh \
[--rir <afrinic,apnic,arin,lacnic,ripe>] \
[--delta-count <n>] \
[--out-root <path>] \
[--rpki-bin <path>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# Helper binaries; each overridable via its env var.
HELPER_BIN="${HELPER_BIN:-$ROOT_DIR/target/release/cir_ta_only_fixture}"
MATERIALIZE_BIN="${MATERIALIZE_BIN:-$ROOT_DIR/target/release/cir_materialize}"
EXTRACT_BIN="${EXTRACT_BIN:-$ROOT_DIR/target/release/cir_extract_inputs}"
WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"
RIRS="afrinic,apnic,arin,lacnic,ripe"
DELTA_COUNT=2
OUT_ROOT="$ROOT_DIR/target/replay/cir_sequence_multi_rir_ta_only_$(date -u +%Y%m%dT%H%M%SZ)"
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --rir) RIRS="$2"; shift 2 ;;
    --delta-count) DELTA_COUNT="$2"; shift 2 ;;
    --out-root) OUT_ROOT="$2"; shift 2 ;;
    --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

# Build all required binaries on demand.
if [[ ! -x "$HELPER_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin cir_ta_only_fixture --bin rpki --bin cir_materialize --bin cir_extract_inputs
  )
fi

# case_paths RIR -> "tal_rel ta_rel"; returns 1 for unknown RIR names.
case_paths() {
  case "$1" in
    afrinic) echo "tests/fixtures/tal/afrinic.tal tests/fixtures/ta/afrinic-ta.cer" ;;
    apnic) echo "tests/fixtures/tal/apnic-rfc7730-https.tal tests/fixtures/ta/apnic-ta.cer" ;;
    arin) echo "tests/fixtures/tal/arin.tal tests/fixtures/ta/arin-ta.cer" ;;
    lacnic) echo "tests/fixtures/tal/lacnic.tal tests/fixtures/ta/lacnic-ta.cer" ;;
    ripe) echo "tests/fixtures/tal/ripe-ncc.tal tests/fixtures/ta/ripe-ncc-ta.cer" ;;
    *) return 1 ;;
  esac
}

mkdir -p "$OUT_ROOT"
IFS=',' read -r -a ITEMS <<< "$RIRS"
for rir in "${ITEMS[@]}"; do
  # case_paths' failure status is invisible through the process substitution;
  # previously an unknown RIR made `read` fail and `set -e` abort silently.
  # Catch the empty read and report the bad value explicitly.
  read -r tal_rel ta_rel < <(case_paths "$rir") || {
    echo "unsupported rir: $rir" >&2
    exit 2
  }
  rir_root="$OUT_ROOT/$rir"
  mkdir -p "$rir_root/full" "$rir_root/static"
  # 1) Generate the TA-only CIR plus its static objects.
  "$HELPER_BIN" \
    --tal-path "$ROOT_DIR/$tal_rel" \
    --ta-path "$ROOT_DIR/$ta_rel" \
    --tal-uri "https://example.test/$rir.tal" \
    --validation-time "2026-04-09T00:00:00Z" \
    --cir-out "$rir_root/full/input.cir" \
    --static-root "$rir_root/static"
  # 2) Extract TALs/metadata and materialize the CIR into an rsync mirror.
  "$EXTRACT_BIN" --cir "$rir_root/full/input.cir" --tals-dir "$rir_root/.tmp/tals" --meta-json "$rir_root/.tmp/meta.json"
  "$MATERIALIZE_BIN" --cir "$rir_root/full/input.cir" --static-root "$rir_root/static" --mirror-root "$rir_root/.tmp/mirror"
  FIRST_TAL="$(python3 - <<'PY' "$rir_root/.tmp/meta.json"
import json,sys
print(json.load(open(sys.argv[1]))["talFiles"][0]["path"])
PY
)"
  # 3) Replay through the validator, routing rsync fetches to the local
  # mirror via the wrapper. Logs are kept per step for debugging (the
  # original discarded them to /dev/null, unlike every sibling script).
  export CIR_MIRROR_ROOT="$rir_root/.tmp/mirror"
  export REAL_RSYNC_BIN=/usr/bin/rsync
  export CIR_LOCAL_LINK_MODE=1
  "$RPKI_BIN" \
    --db "$rir_root/full/db" \
    --tal-path "$FIRST_TAL" \
    --disable-rrdp \
    --rsync-command "$WRAPPER" \
    --validation-time "2026-04-09T00:00:00Z" \
    --ccr-out "$rir_root/full/result.ccr" \
    --report-json "$rir_root/full/report.json" \
    >"$rir_root/full/run.stdout.log" 2>"$rir_root/full/run.stderr.log"
  # 4) Delta steps are exact copies of the full step (TA-only fixture does
  # not change between steps).
  for idx in $(seq 1 "$DELTA_COUNT"); do
    step="$(printf 'delta-%03d' "$idx")"
    mkdir -p "$rir_root/$step"
    cp "$rir_root/full/input.cir" "$rir_root/$step/input.cir"
    cp "$rir_root/full/result.ccr" "$rir_root/$step/result.ccr"
    cp "$rir_root/full/report.json" "$rir_root/$step/report.json"
  done
  # 5) Per-RIR step manifest and summary.
  python3 - <<'PY' "$rir_root" "$DELTA_COUNT"
import json, sys
from pathlib import Path

root = Path(sys.argv[1]); delta_count = int(sys.argv[2])
steps = [{"stepId":"full","kind":"full","validationTime":"2026-04-09T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":None}]
prev = "full"
for i in range(1, delta_count + 1):
    step = f"delta-{i:03d}"
    steps.append({"stepId":step,"kind":"delta","validationTime":"2026-04-09T00:00:00Z","cirPath":f"{step}/input.cir","ccrPath":f"{step}/result.ccr","reportPath":f"{step}/report.json","previousStepId":prev})
    prev = step
(root/"sequence.json").write_text(json.dumps({"version":1,"staticRoot":"static","steps":steps}, indent=2), encoding="utf-8")
(root/"summary.json").write_text(json.dumps({"version":1,"stepCount":len(steps)}, indent=2), encoding="utf-8")
PY
done

# Cross-RIR rollup.
python3 - <<'PY' "$OUT_ROOT" "$RIRS"
import json, sys
from pathlib import Path

root = Path(sys.argv[1]); rirs = [x for x in sys.argv[2].split(',') if x]
items=[]
for rir in rirs:
    seq=json.loads((root/rir/'sequence.json').read_text())
    items.append({"rir":rir,"stepCount":len(seq['steps'])})
(root/'summary.json').write_text(json.dumps({"version":1,"rirs":items}, indent=2), encoding='utf-8')
PY
echo "done: $OUT_ROOT"

View File

@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Run the TA-only multi-RIR sequence recorder on a remote host: sync the repo
# and prebuilt binaries over, then invoke the local script remotely. Echoes
# the remote output directory.
set -euo pipefail

usage() {
cat <<'EOF'
Usage:
./scripts/cir/run_cir_record_sequence_ta_only_remote_multi_rir.sh \
--remote-root <path> \
[--ssh-target <user@host>] \
[--rir <afrinic,apnic,arin,lacnic,ripe>] \
[--delta-count <n>]
EOF
}

# Repository root: this script lives two levels below it (scripts/cir/).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# NOTE(review): hardcoded default bench host; override with --ssh-target or
# the SSH_TARGET env var.
SSH_TARGET="${SSH_TARGET:-root@47.77.183.68}"
REMOTE_ROOT=""
RIRS="afrinic,apnic,arin,lacnic,ripe"
DELTA_COUNT=2

while [[ $# -gt 0 ]]; do
  case "$1" in
    --remote-root) REMOTE_ROOT="$2"; shift 2 ;;
    --ssh-target) SSH_TARGET="$2"; shift 2 ;;
    --rir) RIRS="$2"; shift 2 ;;
    --delta-count) DELTA_COUNT="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done
[[ -n "$REMOTE_ROOT" ]] || { usage >&2; exit 2; }

# Mirror the working tree (minus build artifacts and VCS data), then push
# the locally built helper binaries.
rsync -a --delete \
  --exclude target \
  --exclude .git \
  "$ROOT_DIR/" "$SSH_TARGET:$REMOTE_ROOT/"
ssh "$SSH_TARGET" "mkdir -p '$REMOTE_ROOT/target/release'"
for bin in rpki cir_ta_only_fixture cir_materialize cir_extract_inputs; do
  rsync -a "$ROOT_DIR/target/release/$bin" "$SSH_TARGET:$REMOTE_ROOT/target/release/"
done

# Run remotely. Parameters are passed as environment variables with a quoted
# heredoc (same pattern as run_cir_record_sequence_remote.sh) instead of the
# original string-interpolated `bash -lc '...'`, which broke on values
# containing quotes or whitespace.
ssh "$SSH_TARGET" \
  REMOTE_ROOT="$REMOTE_ROOT" \
  RIRS="$RIRS" \
  DELTA_COUNT="$DELTA_COUNT" \
  'bash -s' <<'EOS'
set -euo pipefail
cd "$REMOTE_ROOT"
OUT="target/replay/cir_sequence_remote_ta_only_$(date -u +%Y%m%dT%H%M%SZ)"
./scripts/cir/run_cir_record_sequence_ta_only_multi_rir.sh --rir "$RIRS" --delta-count "$DELTA_COUNT" --out-root "$OUT"
echo "$OUT"
EOS

View File

@ -140,7 +140,7 @@ env \
--no-rir-tals \
--extra-tals-dir "$TALS_DIR" \
--enable-aspa \
update --complete >"$RUN_LOG" 2>&1
update --complete >"$RUN_LOG" 2>&1 || true
env \
LD_PRELOAD="$FAKETIME_LIB" \

View File

@ -129,17 +129,21 @@ mkdir -p "$CACHE_DIR" "$OUT_CCR_DIR"
-d "$CACHE_DIR" \
"$OUT_CCR_DIR" >"$RUN_LOG" 2>&1
"$BUILD_DIR/tests/rpki-ccr-vrps" \
--input "$OUT_CCR_DIR/rpki.ccr" \
--ta "$TA_NAME" \
--csv-out "$ACTUAL_VRPS" \
--meta-out "$ACTUAL_VRPS_META"
"$CCR_TO_COMPARE_VIEWS_BIN" \
--ccr "$OUT_CCR_DIR/rpki.ccr" \
--vrps-out "$ACTUAL_VRPS" \
--vaps-out "$ACTUAL_VAPS" \
--trust-anchor "$TA_NAME"
"$BUILD_DIR/tests/rpki-ccr-vaps" \
--input "$OUT_CCR_DIR/rpki.ccr" \
--ta "$TA_NAME" \
--csv-out "$ACTUAL_VAPS" \
--meta-out "$ACTUAL_VAPS_META"
python3 - <<'PY' "$ACTUAL_VRPS" "$ACTUAL_VAPS" "$ACTUAL_VRPS_META" "$ACTUAL_VAPS_META"
import csv, json, sys
def count_rows(path):
with open(path, newline="") as f:
rows = list(csv.reader(f))
return max(len(rows) - 1, 0)
json.dump({"count": count_rows(sys.argv[1])}, open(sys.argv[3], "w"), indent=2)
json.dump({"count": count_rows(sys.argv[2])}, open(sys.argv[4], "w"), indent=2)
PY
"$CCR_TO_COMPARE_VIEWS_BIN" --ccr "$REFERENCE_CCR" --vrps-out "$REF_VRPS" --vaps-out "$REF_VAPS" --trust-anchor "$TA_NAME"

View File

@ -0,0 +1,143 @@
#!/usr/bin/env bash
# Replay every step of a recorded CIR sequence with our validator and roll the
# per-step compare/timing results up into sequence-level summary files
# (sequence-summary.json, sequence-summary.md, sequence-detail.json).
set -euo pipefail

usage() {
    cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_sequence_ours.sh \
    --sequence-root <path> \
    [--rpki-bin <path>] \
    [--real-rsync-bin <path>]
EOF
}

# Repository root, derived from this script's location (scripts/cir/..)
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEQUENCE_ROOT=""
# Defaults are overridable via environment or flags.
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
# Single-step replay driver invoked once per sequence step.
STEP_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_ours.sh"

while [[ $# -gt 0 ]]; do
    case "$1" in
        --sequence-root) SEQUENCE_ROOT="$2"; shift 2 ;;
        --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
        --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
        -h|--help) usage; exit 0 ;;
        *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
    esac
done

[[ -n "$SEQUENCE_ROOT" ]] || { usage >&2; exit 2; }

# Canonicalize to an absolute path so the paths recorded in the summaries
# stay valid regardless of the caller's working directory.
SEQUENCE_ROOT="$(python3 - <<'PY' "$SEQUENCE_ROOT"
from pathlib import Path
import sys

print(Path(sys.argv[1]).resolve())
PY
)"

SUMMARY_JSON="$SEQUENCE_ROOT/sequence-summary.json"
SUMMARY_MD="$SEQUENCE_ROOT/sequence-summary.md"
DETAIL_JSON="$SEQUENCE_ROOT/sequence-detail.json"

# Drive the per-step replays and aggregate their outputs.
python3 - <<'PY' "$SEQUENCE_ROOT" "$SUMMARY_JSON" "$SUMMARY_MD" "$DETAIL_JSON" "$STEP_SCRIPT" "$RPKI_BIN" "$REAL_RSYNC_BIN"
import json
import subprocess
import sys
from pathlib import Path

sequence_root = Path(sys.argv[1])
summary_json = Path(sys.argv[2])
summary_md = Path(sys.argv[3])
detail_json = Path(sys.argv[4])
step_script = Path(sys.argv[5])
rpki_bin = sys.argv[6]
real_rsync_bin = sys.argv[7]

# sequence.json is the recorded manifest; keys are camelCase (staticRoot,
# stepId, cirPath, ccrPath, validationTime).
sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
static_root = sequence_root / sequence["staticRoot"]
steps = sequence["steps"]

results = []
all_match = True
for step in steps:
    step_id = step["stepId"]
    out_dir = sequence_root / "replay-ours" / step_id
    # Only the parent is created here; the step script is expected to create
    # out_dir itself.
    out_dir.parent.mkdir(parents=True, exist_ok=True)
    cmd = [
        str(step_script),
        "--cir",
        str(sequence_root / step["cirPath"]),
        "--static-root",
        str(static_root),
        "--out-dir",
        str(out_dir),
        "--reference-ccr",
        str(sequence_root / step["ccrPath"]),
        "--rpki-bin",
        rpki_bin,
        "--real-rsync-bin",
        real_rsync_bin,
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        # Abort the whole sequence on the first failing step; include the
        # child's output for diagnosis.
        raise SystemExit(
            f"ours sequence replay failed for {step_id}: stdout={proc.stdout} stderr={proc.stderr}"
        )
    compare = json.loads((out_dir / "compare-summary.json").read_text(encoding="utf-8"))
    # timing.json is optional output of the step script.
    timing = json.loads((out_dir / "timing.json").read_text(encoding="utf-8")) if (out_dir / "timing.json").exists() else {}
    record = {
        "stepId": step_id,
        "kind": step["kind"],
        "validationTime": step["validationTime"],
        "outDir": str(out_dir),
        "comparePath": str(out_dir / "compare-summary.json"),
        "timingPath": str(out_dir / "timing.json"),
        "compare": compare,
        "timing": timing,
        # A step matches only when both the VRP and VAP views match.
        "match": bool(compare["vrps"]["match"]) and bool(compare["vaps"]["match"]),
    }
    all_match = all_match and record["match"]
    results.append(record)

summary = {
    "version": 1,
    "participant": "ours",
    "sequenceRoot": str(sequence_root),
    "stepCount": len(results),
    "allMatch": all_match,
    "steps": results,
}
summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")
detail_json.write_text(json.dumps(results, indent=2), encoding="utf-8")

# Human-readable markdown table mirroring the JSON summary.
lines = [
    "# Ours CIR Sequence Replay Summary",
    "",
    f"- `sequence_root`: `{sequence_root}`",
    f"- `step_count`: `{len(results)}`",
    f"- `all_match`: `{all_match}`",
    "",
    "| Step | Kind | VRP actual/ref | VRP match | VAP actual/ref | VAP match | Duration (ms) |",
    "| --- | --- | --- | --- | --- | --- | ---: |",
]
for item in results:
    compare = item["compare"]
    timing = item.get("timing") or {}
    lines.append(
        "| {step} | {kind} | {va}/{vr} | {vm} | {aa}/{ar} | {am} | {dur} |".format(
            step=item["stepId"],
            kind=item["kind"],
            va=compare["vrps"]["actual"],
            vr=compare["vrps"]["reference"],
            vm=compare["vrps"]["match"],
            aa=compare["vaps"]["actual"],
            ar=compare["vaps"]["reference"],
            am=compare["vaps"]["match"],
            dur=timing.get("durationMs", "-"),
        )
    )
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY

echo "done: $SEQUENCE_ROOT"

View File

@ -0,0 +1,126 @@
#!/usr/bin/env bash
# Replay every step of a recorded CIR sequence with Routinator and roll the
# per-step compare results up into sequence-level summary files
# (sequence-summary-routinator.json / .md).
set -euo pipefail

usage() {
    cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_sequence_routinator.sh \
    --sequence-root <path> \
    [--routinator-root <path>] \
    [--routinator-bin <path>] \
    [--real-rsync-bin <path>]
EOF
}

# Repository root, derived from this script's location (scripts/cir/..)
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEQUENCE_ROOT=""
# Defaults are overridable via environment or flags.
ROUTINATOR_ROOT="${ROUTINATOR_ROOT:-/home/yuyr/dev/rust_playground/routinator}"
ROUTINATOR_BIN="${ROUTINATOR_BIN:-$ROUTINATOR_ROOT/target/debug/routinator}"
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
# Single-step replay driver invoked once per sequence step.
STEP_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_routinator.sh"

while [[ $# -gt 0 ]]; do
    case "$1" in
        --sequence-root) SEQUENCE_ROOT="$2"; shift 2 ;;
        --routinator-root) ROUTINATOR_ROOT="$2"; shift 2 ;;
        --routinator-bin) ROUTINATOR_BIN="$2"; shift 2 ;;
        --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
        -h|--help) usage; exit 0 ;;
        *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
    esac
done

[[ -n "$SEQUENCE_ROOT" ]] || { usage >&2; exit 2; }

# Canonicalize to an absolute path so the paths recorded in the summary
# stay valid regardless of the caller's working directory.
SEQUENCE_ROOT="$(python3 - <<'PY' "$SEQUENCE_ROOT"
from pathlib import Path
import sys

print(Path(sys.argv[1]).resolve())
PY
)"

SUMMARY_JSON="$SEQUENCE_ROOT/sequence-summary-routinator.json"
SUMMARY_MD="$SEQUENCE_ROOT/sequence-summary-routinator.md"

# Drive the per-step replays and aggregate their outputs.
python3 - <<'PY' "$SEQUENCE_ROOT" "$SUMMARY_JSON" "$SUMMARY_MD" "$STEP_SCRIPT" "$ROUTINATOR_ROOT" "$ROUTINATOR_BIN" "$REAL_RSYNC_BIN"
import json
import subprocess
import sys
from pathlib import Path

sequence_root = Path(sys.argv[1])
summary_json = Path(sys.argv[2])
summary_md = Path(sys.argv[3])
step_script = Path(sys.argv[4])
routinator_root = sys.argv[5]
routinator_bin = sys.argv[6]
real_rsync_bin = sys.argv[7]

# sequence.json is the recorded manifest; keys are camelCase (staticRoot,
# stepId, cirPath, ccrPath, validationTime).
sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
static_root = sequence_root / sequence["staticRoot"]
steps = sequence["steps"]

results = []
all_match = True
for step in steps:
    step_id = step["stepId"]
    out_dir = sequence_root / "replay-routinator" / step_id
    # Only the parent is created here; the step script is expected to create
    # out_dir itself.
    out_dir.parent.mkdir(parents=True, exist_ok=True)
    cmd = [
        str(step_script),
        "--cir",
        str(sequence_root / step["cirPath"]),
        "--static-root",
        str(static_root),
        "--out-dir",
        str(out_dir),
        "--reference-ccr",
        str(sequence_root / step["ccrPath"]),
        "--routinator-root",
        routinator_root,
        "--routinator-bin",
        routinator_bin,
        "--real-rsync-bin",
        real_rsync_bin,
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        # Abort the whole sequence on the first failing step; include the
        # child's output for diagnosis.
        raise SystemExit(
            f"routinator sequence replay failed for {step_id}: stdout={proc.stdout} stderr={proc.stderr}"
        )
    compare = json.loads((out_dir / "compare-summary.json").read_text(encoding="utf-8"))
    # A step matches only when both the VRP and VAP views match.
    match = bool(compare["vrps"]["match"]) and bool(compare["vaps"]["match"])
    all_match = all_match and match
    results.append(
        {
            "stepId": step_id,
            "kind": step["kind"],
            "validationTime": step["validationTime"],
            "outDir": str(out_dir),
            "comparePath": str(out_dir / "compare-summary.json"),
            "match": match,
            "compare": compare,
        }
    )

summary = {
    "version": 1,
    "participant": "routinator",
    "sequenceRoot": str(sequence_root),
    "stepCount": len(results),
    "allMatch": all_match,
    "steps": results,
}
summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")

lines = [
    "# Routinator CIR Sequence Replay Summary",
    "",
    f"- `sequence_root`: `{sequence_root}`",
    f"- `step_count`: `{len(results)}`",
    f"- `all_match`: `{all_match}`",
    "",
]
# Append a trailing newline so the file is a proper text file, consistent
# with the "ours" and matrix summary writers (they use `+ "\n"` too).
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY

echo "done: $SEQUENCE_ROOT"

View File

@ -0,0 +1,120 @@
#!/usr/bin/env bash
# Replay every step of a recorded CIR sequence with rpki-client and roll the
# per-step compare results up into sequence-level summary files
# (sequence-summary-rpki-client.json / .md).
set -euo pipefail

usage() {
    cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_sequence_rpki_client.sh \
    --sequence-root <path> \
    --build-dir <path> \
    [--real-rsync-bin <path>]
EOF
}

# Repository root, derived from this script's location (scripts/cir/..)
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEQUENCE_ROOT=""
BUILD_DIR=""
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
# Single-step replay driver invoked once per sequence step.
STEP_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_rpki_client.sh"

while [[ $# -gt 0 ]]; do
    case "$1" in
        --sequence-root) SEQUENCE_ROOT="$2"; shift 2 ;;
        --build-dir) BUILD_DIR="$2"; shift 2 ;;
        --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
        -h|--help) usage; exit 0 ;;
        *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
    esac
done

[[ -n "$SEQUENCE_ROOT" && -n "$BUILD_DIR" ]] || { usage >&2; exit 2; }

# Canonicalize to an absolute path so the paths recorded in the summary
# stay valid regardless of the caller's working directory.
SEQUENCE_ROOT="$(python3 - <<'PY' "$SEQUENCE_ROOT"
from pathlib import Path
import sys

print(Path(sys.argv[1]).resolve())
PY
)"

SUMMARY_JSON="$SEQUENCE_ROOT/sequence-summary-rpki-client.json"
SUMMARY_MD="$SEQUENCE_ROOT/sequence-summary-rpki-client.md"

# Drive the per-step replays and aggregate their outputs.
python3 - <<'PY' "$SEQUENCE_ROOT" "$SUMMARY_JSON" "$SUMMARY_MD" "$STEP_SCRIPT" "$BUILD_DIR" "$REAL_RSYNC_BIN"
import json
import subprocess
import sys
from pathlib import Path

sequence_root = Path(sys.argv[1])
summary_json = Path(sys.argv[2])
summary_md = Path(sys.argv[3])
step_script = Path(sys.argv[4])
build_dir = sys.argv[5]
real_rsync_bin = sys.argv[6]

# sequence.json is the recorded manifest; keys are camelCase (staticRoot,
# stepId, cirPath, ccrPath, validationTime).
sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
static_root = sequence_root / sequence["staticRoot"]
steps = sequence["steps"]

results = []
all_match = True
for step in steps:
    step_id = step["stepId"]
    out_dir = sequence_root / "replay-rpki-client" / step_id
    # Only the parent is created here; the step script is expected to create
    # out_dir itself.
    out_dir.parent.mkdir(parents=True, exist_ok=True)
    cmd = [
        str(step_script),
        "--cir",
        str(sequence_root / step["cirPath"]),
        "--static-root",
        str(static_root),
        "--out-dir",
        str(out_dir),
        "--reference-ccr",
        str(sequence_root / step["ccrPath"]),
        "--build-dir",
        build_dir,
        "--real-rsync-bin",
        real_rsync_bin,
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        # Abort the whole sequence on the first failing step; include the
        # child's output for diagnosis.
        raise SystemExit(
            f"rpki-client sequence replay failed for {step_id}: stdout={proc.stdout} stderr={proc.stderr}"
        )
    compare = json.loads((out_dir / "compare-summary.json").read_text(encoding="utf-8"))
    # A step matches only when both the VRP and VAP views match.
    match = bool(compare["vrps"]["match"]) and bool(compare["vaps"]["match"])
    all_match = all_match and match
    results.append(
        {
            "stepId": step_id,
            "kind": step["kind"],
            "validationTime": step["validationTime"],
            "outDir": str(out_dir),
            "comparePath": str(out_dir / "compare-summary.json"),
            "match": match,
            "compare": compare,
        }
    )

summary = {
    "version": 1,
    "participant": "rpki-client",
    "sequenceRoot": str(sequence_root),
    "stepCount": len(results),
    "allMatch": all_match,
    "steps": results,
}
summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")

lines = [
    "# rpki-client CIR Sequence Replay Summary",
    "",
    f"- `sequence_root`: `{sequence_root}`",
    f"- `step_count`: `{len(results)}`",
    f"- `all_match`: `{all_match}`",
    "",
]
# Append a trailing newline so the file is a proper text file, consistent
# with the "ours" and matrix summary writers (they use `+ "\n"` too).
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY

echo "done: $SEQUENCE_ROOT"

View File

@ -0,0 +1,132 @@
#!/usr/bin/env bash
# For each RIR sequence under --root, run the three replay participants
# (ours, Routinator, rpki-client) plus the drop analysis, then aggregate
# everything into final-summary.json / final-summary.md.
set -euo pipefail

usage() {
    cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_sequence_matrix_multi_rir.sh \
    --root <path> \
    [--rir <afrinic,apnic,arin,lacnic,ripe>] \
    [--rpki-bin <path>] \
    [--routinator-root <path>] \
    [--routinator-bin <path>] \
    [--rpki-client-build-dir <path>] \
    [--drop-bin <path>]
EOF
}

# Repository root, derived from this script's location (scripts/cir/..)
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
ROOT=""
RIRS="afrinic,apnic,arin,lacnic,ripe"
# Defaults are overridable via environment or flags.
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"
ROUTINATOR_ROOT="${ROUTINATOR_ROOT:-/home/yuyr/dev/rust_playground/routinator}"
ROUTINATOR_BIN="${ROUTINATOR_BIN:-$ROUTINATOR_ROOT/target/debug/routinator}"
RPKI_CLIENT_BUILD_DIR="${RPKI_CLIENT_BUILD_DIR:-/home/yuyr/dev/rpki-client-9.7/build-m5}"
DROP_BIN="${DROP_BIN:-$ROOT_DIR/target/release/cir_drop_report}"
# Per-participant sequence drivers.
OURS_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_sequence_ours.sh"
ROUTINATOR_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_sequence_routinator.sh"
RPKIC_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_sequence_rpki_client.sh"
DROP_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_drop_sequence.sh"

while [[ $# -gt 0 ]]; do
    case "$1" in
        --root) ROOT="$2"; shift 2 ;;
        --rir) RIRS="$2"; shift 2 ;;
        --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
        --routinator-root) ROUTINATOR_ROOT="$2"; shift 2 ;;
        --routinator-bin) ROUTINATOR_BIN="$2"; shift 2 ;;
        --rpki-client-build-dir) RPKI_CLIENT_BUILD_DIR="$2"; shift 2 ;;
        --drop-bin) DROP_BIN="$2"; shift 2 ;;
        -h|--help) usage; exit 0 ;;
        *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
    esac
done

[[ -n "$ROOT" ]] || { usage >&2; exit 2; }

SUMMARY_JSON="$ROOT/final-summary.json"
SUMMARY_MD="$ROOT/final-summary.md"

# Run every participant plus the drop analysis for each requested RIR.
# Each sequence is expected at $ROOT/<rir>.
IFS=',' read -r -a ITEMS <<< "$RIRS"
for rir in "${ITEMS[@]}"; do
    seq_root="$ROOT/$rir"
    "$OURS_SCRIPT" --sequence-root "$seq_root" --rpki-bin "$RPKI_BIN"
    "$ROUTINATOR_SCRIPT" --sequence-root "$seq_root" --routinator-root "$ROUTINATOR_ROOT" --routinator-bin "$ROUTINATOR_BIN"
    "$RPKIC_SCRIPT" --sequence-root "$seq_root" --build-dir "$RPKI_CLIENT_BUILD_DIR"
    "$DROP_SCRIPT" --sequence-root "$seq_root" --drop-bin "$DROP_BIN"
done

# Aggregate the per-RIR summaries into the final matrix summary.
python3 - <<'PY' "$ROOT" "$RIRS" "$SUMMARY_JSON" "$SUMMARY_MD"
import json, sys
from pathlib import Path
from collections import Counter

root = Path(sys.argv[1]).resolve()
rirs = [item for item in sys.argv[2].split(',') if item]
summary_json = Path(sys.argv[3])
summary_md = Path(sys.argv[4])

items = []
total_steps = 0
total_dropped_vrps = 0
total_dropped_objects = 0
reason_counter = Counter()
for rir in rirs:
    seq_root = root / rir
    ours = json.loads((seq_root / "sequence-summary.json").read_text(encoding="utf-8"))
    routinator = json.loads((seq_root / "sequence-summary-routinator.json").read_text(encoding="utf-8"))
    rpki_client = json.loads((seq_root / "sequence-summary-rpki-client.json").read_text(encoding="utf-8"))
    # NOTE(review): assumes run_cir_drop_sequence.sh wrote drop-summary.json
    # with a "steps" list whose entries carry "reportPath" — confirm.
    drop = json.loads((seq_root / "drop-summary.json").read_text(encoding="utf-8"))
    step_count = len(ours["steps"])
    total_steps += step_count
    rir_dropped_vrps = 0
    rir_dropped_objects = 0
    for step in drop["steps"]:
        drop_path = Path(step["reportPath"])
        detail = json.loads(drop_path.read_text(encoding="utf-8"))
        summary = detail.get("summary", {})
        rir_dropped_vrps += int(summary.get("droppedVrpCount", 0))
        rir_dropped_objects += int(summary.get("droppedObjectCount", 0))
        total_dropped_vrps += int(summary.get("droppedVrpCount", 0))
        total_dropped_objects += int(summary.get("droppedObjectCount", 0))
        for reason, count in summary.get("droppedByReason", {}).items():
            reason_counter[reason] += int(count)
    items.append({
        "rir": rir,
        "stepCount": step_count,
        "oursAllMatch": ours["allMatch"],
        "routinatorAllMatch": routinator["allMatch"],
        "rpkiClientAllMatch": rpki_client["allMatch"],
        "dropSummary": drop["steps"],
        "droppedVrpCount": rir_dropped_vrps,
        "droppedObjectCount": rir_dropped_objects,
    })

summary = {
    "version": 1,
    "totalStepCount": total_steps,
    "totalDroppedVrpCount": total_dropped_vrps,
    "totalDroppedObjectCount": total_dropped_objects,
    "topReasons": [{"reason": reason, "count": count} for reason, count in reason_counter.most_common(10)],
    "rirs": items,
}
summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")

lines = ["# Multi-RIR CIR Sequence Matrix Summary", ""]
lines.append(f"- `total_step_count`: `{total_steps}`")
lines.append(f"- `total_dropped_vrps`: `{total_dropped_vrps}`")
lines.append(f"- `total_dropped_objects`: `{total_dropped_objects}`")
lines.append("")
if reason_counter:
    lines.append("## Top Drop Reasons")
    lines.append("")
    for reason, count in reason_counter.most_common(10):
        lines.append(f"- `{reason}`: `{count}`")
    lines.append("")
for item in items:
    lines.append(
        f"- `{item['rir']}`: `steps={item['stepCount']}` `ours={item['oursAllMatch']}` `routinator={item['routinatorAllMatch']}` `rpki-client={item['rpkiClientAllMatch']}` `drop_vrps={item['droppedVrpCount']}` `drop_objects={item['droppedObjectCount']}`"
    )
summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
PY

echo "done: $ROOT"

250
src/bin/cir_drop_report.rs Normal file
View File

@ -0,0 +1,250 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;
use rpki::bundle::decode_ccr_compare_views;
use rpki::ccr::decode_content_info;
use rpki::cir::{decode_cir, resolve_static_pool_file};
use rpki::data_model::roa::RoaObject;
const USAGE: &str = "Usage: cir_drop_report --cir <path> --ccr <path> --report-json <path> --static-root <path> --json-out <path> --md-out <path>";
/// One non-"ok" object taken from the audit report, flattened for the
/// drop-report JSON output.
#[derive(serde::Serialize)]
struct DroppedObjectRecord {
    /// rsync URI of the object (empty string when the report omitted it).
    uri: String,
    /// Lowercase hex SHA-256 of the object; empty when neither the report
    /// nor the CIR object list knew the hash.
    sha256: String,
    /// Object kind as reported by the audit (defaults to "other").
    kind: String,
    /// Machine-readable reason bucket produced by `classify_reason`.
    reason_code: String,
    /// Raw `detail` text from the audit entry, when present.
    reason_text: Option<String>,
    /// rsync URI of the publication point this object belonged to.
    publication_point: Option<String>,
    /// rsync URI of the manifest covering the publication point.
    manifest_uri: Option<String>,
    /// Number of VRP rows derived by decoding the dropped ROA from the
    /// static pool (0 for non-ROA objects or when the ROA could not be read).
    derived_vrp_count: usize,
}
/// Map an audit entry's free-text `detail` (and its coarse `result`) onto a
/// stable machine-readable reason bucket.
///
/// Keyword rules are checked in priority order against the lowercased detail
/// text; only when none of them hits does the `result` value decide between
/// "skipped", "error" and the catch-all "other".
fn classify_reason(detail: Option<&str>, result: &str) -> String {
    let text = detail.unwrap_or_default().to_ascii_lowercase();
    // (needles, bucket) pairs, first match wins; order mirrors the original
    // priority (e.g. a "manifest fetch" detail classifies as fetch_failed).
    let rules: &[(&[&str], &str)] = &[
        (&["fetch"], "fetch_failed"),
        (&["manifest"], "manifest_invalid"),
        (&["crl"], "crl_invalid"),
        (&["policy"], "policy_rejected"),
        (&["parse"], "object_parse_failed"),
        (&["signature", "cms"], "cms_signature_invalid"),
        (&["resource"], "resource_invalid"),
        (&["expired", "not yet valid"], "expired_or_not_yet_valid"),
    ];
    for (needles, bucket) in rules {
        if needles.iter().any(|needle| text.contains(needle)) {
            return (*bucket).to_string();
        }
    }
    match result {
        "skipped" => "skipped",
        "error" => "error",
        _ => "other",
    }
    .to_string()
}
/// Parse the CLI for `cir_drop_report`.
///
/// Returns, in order: `--cir`, `--ccr`, `--report-json`, `--static-root`,
/// `--json-out`, `--md-out` — every flag is required and takes one value.
/// `-h`/`--help` and unknown flags produce an `Err` carrying the usage text.
fn parse_args(
    argv: &[String],
) -> Result<(PathBuf, PathBuf, PathBuf, PathBuf, PathBuf, PathBuf), String> {
    // Consume the value following a flag, advancing the cursor.
    fn take_value(argv: &[String], idx: &mut usize, flag: &str) -> Result<PathBuf, String> {
        *idx += 1;
        argv.get(*idx)
            .map(PathBuf::from)
            .ok_or_else(|| format!("{flag} requires a value"))
    }
    // Turn an unset slot into the standard "is required" error.
    fn required(slot: Option<PathBuf>, flag: &str) -> Result<PathBuf, String> {
        slot.ok_or_else(|| format!("{flag} is required\n\n{USAGE}"))
    }
    // Slots in output order: cir, ccr, report, static_root, json_out, md_out.
    let mut slots: [Option<PathBuf>; 6] = Default::default();
    let mut idx = 1usize;
    while idx < argv.len() {
        let flag = argv[idx].as_str();
        match flag {
            "--cir" => slots[0] = Some(take_value(argv, &mut idx, flag)?),
            "--ccr" => slots[1] = Some(take_value(argv, &mut idx, flag)?),
            "--report-json" => slots[2] = Some(take_value(argv, &mut idx, flag)?),
            "--static-root" => slots[3] = Some(take_value(argv, &mut idx, flag)?),
            "--json-out" => slots[4] = Some(take_value(argv, &mut idx, flag)?),
            "--md-out" => slots[5] = Some(take_value(argv, &mut idx, flag)?),
            "-h" | "--help" => return Err(USAGE.to_string()),
            other => return Err(format!("unknown argument: {other}\n\n{USAGE}")),
        }
        idx += 1;
    }
    let [cir, ccr, report, static_root, json_out, md_out] = slots;
    Ok((
        required(cir, "--cir")?,
        required(ccr, "--ccr")?,
        required(report, "--report-json")?,
        required(static_root, "--static-root")?,
        required(json_out, "--json-out")?,
        required(md_out, "--md-out")?,
    ))
}
/// Entry point: cross-reference a CIR (recorded input world), the audit
/// report and the produced CCR to emit a "drop report" — which objects were
/// rejected, why, and how many VRP rows a dropped ROA would have contributed.
/// Writes a JSON report to `--json-out` and a markdown digest to `--md-out`.
fn main() -> Result<(), String> {
    let argv: Vec<String> = std::env::args().collect();
    let (cir_path, ccr_path, report_path, static_root, json_out, md_out) = parse_args(&argv)?;
    // Decode the recorded CIR (DER) so we can recover object hashes by URI.
    let cir = decode_cir(&std::fs::read(&cir_path).map_err(|e| format!("read cir failed: {e}"))?)
        .map_err(|e| format!("decode cir failed: {e}"))?;
    // Decode the CCR; only the final VRP/VAP counts are used here.
    let ccr = decode_content_info(
        &std::fs::read(&ccr_path).map_err(|e| format!("read ccr failed: {e}"))?,
    )
    .map_err(|e| format!("decode ccr failed: {e}"))?;
    let (vrps, vaps) =
        decode_ccr_compare_views(&ccr, "unknown").map_err(|e| format!("decode compare views failed: {e}"))?;
    // The audit report is consumed as untyped JSON.
    let report: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&report_path).map_err(|e| format!("read report failed: {e}"))?)
            .map_err(|e| format!("parse report failed: {e}"))?;
    // URI -> hex hash fallback, used when a report entry lacks sha256_hex.
    let mut object_hash_by_uri = BTreeMap::new();
    for object in &cir.objects {
        object_hash_by_uri.insert(object.rsync_uri.clone(), hex::encode(&object.sha256));
    }
    let publication_points = report["publication_points"]
        .as_array()
        .ok_or("report.publication_points must be an array")?;
    let mut dropped_objects = Vec::new();
    // BTreeSet dedupes (asn, prefix, max_len) rows shared by several ROAs.
    let mut dropped_vrp_rows = BTreeSet::new();
    let mut dropped_by_kind: BTreeMap<String, usize> = BTreeMap::new();
    let mut dropped_by_reason: BTreeMap<String, usize> = BTreeMap::new();
    // ROAs that were dropped but could not be re-read/re-decoded from the
    // static pool, so their VRP contribution is unknown.
    let mut unknown_roa_objects = 0usize;
    for pp in publication_points {
        let publication_point = pp["publication_point_rsync_uri"].as_str().map(str::to_string);
        let manifest_uri = pp["manifest_rsync_uri"].as_str().map(str::to_string);
        for obj in pp["objects"].as_array().into_iter().flatten() {
            let result = obj["result"].as_str().unwrap_or("unknown");
            // Only non-"ok" objects count as dropped.
            if result == "ok" {
                continue;
            }
            let uri = obj["rsync_uri"].as_str().unwrap_or("").to_string();
            // Prefer the report's hash; fall back to the CIR's hash for the
            // same URI; empty string when neither is known.
            let hash = obj["sha256_hex"]
                .as_str()
                .map(str::to_string)
                .or_else(|| object_hash_by_uri.get(&uri).cloned())
                .unwrap_or_default();
            let kind = obj["kind"].as_str().unwrap_or("other").to_string();
            let detail = obj["detail"].as_str().map(str::to_string);
            let reason_code = classify_reason(detail.as_deref(), result);
            *dropped_by_kind.entry(kind.clone()).or_insert(0) += 1;
            *dropped_by_reason.entry(reason_code.clone()).or_insert(0) += 1;
            let mut derived_vrp_count = 0usize;
            // For dropped ROAs with a known hash, decode the ROA bytes from
            // the static pool to count the VRP rows that were lost.
            if kind == "roa" && !hash.is_empty() {
                match resolve_static_pool_file(&static_root, &hash) {
                    Ok(path) => {
                        if let Ok(bytes) = std::fs::read(&path) {
                            if let Ok(roa) = RoaObject::decode_der(&bytes) {
                                for family in roa.roa.ip_addr_blocks {
                                    for addr in family.addresses {
                                        let prefix = match addr.prefix.afi {
                                            // IPv4 uses the first 4 bytes of the address buffer.
                                            rpki::data_model::roa::RoaAfi::Ipv4 => format!(
                                                "{}.{}.{}.{}/{}",
                                                addr.prefix.addr[0],
                                                addr.prefix.addr[1],
                                                addr.prefix.addr[2],
                                                addr.prefix.addr[3],
                                                addr.prefix.prefix_len
                                            ),
                                            rpki::data_model::roa::RoaAfi::Ipv6 => {
                                                let bytes: [u8; 16] = addr.prefix.addr;
                                                format!("{}/{}", std::net::Ipv6Addr::from(bytes), addr.prefix.prefix_len)
                                            }
                                        };
                                        // Missing maxLength defaults to the prefix length (RFC 6482 semantics).
                                        let max_len = addr.max_length.unwrap_or(addr.prefix.prefix_len);
                                        dropped_vrp_rows.insert((roa.roa.as_id, prefix, max_len));
                                        derived_vrp_count += 1;
                                    }
                                }
                            } else {
                                unknown_roa_objects += 1;
                            }
                        } else {
                            unknown_roa_objects += 1;
                        }
                    }
                    Err(_) => unknown_roa_objects += 1,
                }
            }
            dropped_objects.push(DroppedObjectRecord {
                uri,
                sha256: hash,
                kind,
                reason_code,
                reason_text: detail,
                publication_point: publication_point.clone(),
                manifest_uri: manifest_uri.clone(),
                derived_vrp_count,
            });
        }
    }
    let output = serde_json::json!({
        "summary": {
            "finalVrpCount": vrps.len(),
            "finalVapCount": vaps.len(),
            "droppedVrpCount": dropped_vrp_rows.len(),
            "droppedObjectCount": dropped_objects.len(),
            "droppedByKind": dropped_by_kind,
            "droppedByReason": dropped_by_reason,
            "unknownDroppedRoaObjects": unknown_roa_objects,
        },
        "objects": dropped_objects,
    });
    if let Some(parent) = json_out.parent() {
        std::fs::create_dir_all(parent).map_err(|e| format!("create json parent failed: {e}"))?;
    }
    // Serializing a serde_json::Value cannot realistically fail here.
    std::fs::write(&json_out, serde_json::to_vec_pretty(&output).unwrap())
        .map_err(|e| format!("write json failed: {e}"))?;
    // Markdown digest mirrors the JSON summary section.
    let mut md = String::new();
    md.push_str("# CIR Drop Report\n\n");
    md.push_str(&format!("- `final_vrp_count`: `{}`\n", vrps.len()));
    md.push_str(&format!("- `final_vap_count`: `{}`\n", vaps.len()));
    md.push_str(&format!("- `dropped_vrp_count`: `{}`\n", output["summary"]["droppedVrpCount"]));
    md.push_str(&format!("- `dropped_object_count`: `{}`\n", output["summary"]["droppedObjectCount"]));
    md.push_str(&format!(
        "- `unknown_dropped_roa_objects`: `{}`\n\n",
        output["summary"]["unknownDroppedRoaObjects"]
    ));
    md.push_str("## Dropped By Kind\n\n");
    for (kind, count) in output["summary"]["droppedByKind"].as_object().into_iter().flatten() {
        md.push_str(&format!("- `{kind}`: `{}`\n", count.as_u64().unwrap_or(0)));
    }
    md.push_str("\n## Dropped By Reason\n\n");
    for (reason, count) in output["summary"]["droppedByReason"].as_object().into_iter().flatten() {
        md.push_str(&format!("- `{reason}`: `{}`\n", count.as_u64().unwrap_or(0)));
    }
    if let Some(parent) = md_out.parent() {
        std::fs::create_dir_all(parent).map_err(|e| format!("create markdown parent failed: {e}"))?;
    }
    std::fs::write(&md_out, md).map_err(|e| format!("write markdown failed: {e}"))?;
    Ok(())
}

View File

@ -0,0 +1,102 @@
use std::path::PathBuf;
use rpki::cir::{encode_cir, write_bytes_to_static_pool, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
use sha2::Digest;
const USAGE: &str = "Usage: cir_ta_only_fixture --tal-path <path> --ta-path <path> --tal-uri <url> --validation-time <rfc3339> --cir-out <path> --static-root <path>";
/// Parse the CLI for `cir_ta_only_fixture`.
///
/// Returns, in order: `--tal-path`, `--ta-path`, `--tal-uri`,
/// `--validation-time` (RFC 3339), `--cir-out`, `--static-root` — every flag
/// is required and takes one value. `-h`/`--help` and unknown flags produce
/// an `Err` carrying the usage text.
fn parse_args(
    argv: &[String],
) -> Result<(PathBuf, PathBuf, String, time::OffsetDateTime, PathBuf, PathBuf), String> {
    // Consume the raw value following a flag, advancing the cursor.
    fn next_value<'a>(argv: &'a [String], idx: &mut usize, flag: &str) -> Result<&'a String, String> {
        *idx += 1;
        argv.get(*idx).ok_or_else(|| format!("{flag} requires a value"))
    }
    let mut tal_path: Option<PathBuf> = None;
    let mut ta_path: Option<PathBuf> = None;
    let mut tal_uri: Option<String> = None;
    let mut validation_time: Option<time::OffsetDateTime> = None;
    let mut cir_out: Option<PathBuf> = None;
    let mut static_root: Option<PathBuf> = None;
    let mut idx = 1usize;
    while idx < argv.len() {
        let flag = argv[idx].as_str();
        match flag {
            "--tal-path" => tal_path = Some(PathBuf::from(next_value(argv, &mut idx, flag)?)),
            "--ta-path" => ta_path = Some(PathBuf::from(next_value(argv, &mut idx, flag)?)),
            "--tal-uri" => tal_uri = Some(next_value(argv, &mut idx, flag)?.clone()),
            "--validation-time" => {
                let raw = next_value(argv, &mut idx, flag)?;
                // Validation time must be RFC 3339, e.g. 2026-04-08T00:00:00Z.
                let parsed = time::OffsetDateTime::parse(
                    raw,
                    &time::format_description::well_known::Rfc3339,
                )
                .map_err(|e| format!("invalid validation time: {e}"))?;
                validation_time = Some(parsed);
            }
            "--cir-out" => cir_out = Some(PathBuf::from(next_value(argv, &mut idx, flag)?)),
            "--static-root" => static_root = Some(PathBuf::from(next_value(argv, &mut idx, flag)?)),
            "-h" | "--help" => return Err(USAGE.to_string()),
            other => return Err(format!("unknown argument: {other}\n\n{USAGE}")),
        }
        idx += 1;
    }
    Ok((
        tal_path.ok_or_else(|| format!("--tal-path is required\n\n{USAGE}"))?,
        ta_path.ok_or_else(|| format!("--ta-path is required\n\n{USAGE}"))?,
        tal_uri.ok_or_else(|| format!("--tal-uri is required\n\n{USAGE}"))?,
        validation_time.ok_or_else(|| format!("--validation-time is required\n\n{USAGE}"))?,
        cir_out.ok_or_else(|| format!("--cir-out is required\n\n{USAGE}"))?,
        static_root.ok_or_else(|| format!("--static-root is required\n\n{USAGE}"))?,
    ))
}
/// Entry point: build a minimal "TA only" CIR fixture — a CIR whose object
/// list contains just the trust-anchor certificate — and stash the TA bytes
/// in the static pool so replay can resolve them by hash.
fn main() -> Result<(), String> {
    let argv: Vec<String> = std::env::args().collect();
    let (tal_path, ta_path, tal_uri, validation_time, cir_out, static_root) = parse_args(&argv)?;
    let tal_bytes = std::fs::read(&tal_path).map_err(|e| format!("read tal failed: {e}"))?;
    let ta_bytes = std::fs::read(&ta_path).map_err(|e| format!("read ta failed: {e}"))?;
    let tal = rpki::data_model::tal::Tal::decode_bytes(&tal_bytes)
        .map_err(|e| format!("decode tal failed: {e}"))?;
    // The CIR object entry is keyed by rsync URI, so the TAL must advertise
    // at least one rsync location for the TA certificate.
    let ta_rsync_uri = tal
        .ta_uris
        .iter()
        .find(|uri| uri.scheme() == "rsync")
        .ok_or("tal must contain an rsync URI")?
        .as_str()
        .to_string();
    let sha = sha2::Sha256::digest(&ta_bytes);
    let hash_hex = hex::encode(sha);
    // Store the TA bytes under <static_root>/<capture date>/<hash> so replay
    // can resolve the CIR's hash reference without network access.
    write_bytes_to_static_pool(&static_root, validation_time.date(), &hash_hex, &ta_bytes)
        .map_err(|e| format!("write static pool failed: {e}"))?;
    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time,
        // Single object: the trust-anchor certificate itself.
        objects: vec![CirObject {
            rsync_uri: ta_rsync_uri,
            sha256: sha.to_vec(),
        }],
        tals: vec![CirTal { tal_uri, tal_bytes }],
    };
    let der = encode_cir(&cir).map_err(|e| format!("encode cir failed: {e}"))?;
    if let Some(parent) = cir_out.parent() {
        std::fs::create_dir_all(parent).map_err(|e| format!("create cir parent failed: {e}"))?;
    }
    std::fs::write(&cir_out, der).map_err(|e| format!("write cir failed: {e}"))?;
    Ok(())
}

View File

@ -2,6 +2,7 @@ use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::Path;
use crate::audit::{AuditObjectResult, PublicationPointAudit};
use crate::cir::encode::{CirEncodeError, encode_cir};
use crate::cir::model::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
@ -13,6 +14,14 @@ use crate::cir::static_pool::{
use crate::data_model::ta::TrustAnchor;
use crate::storage::{RepositoryViewState, RocksStore};
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirExportTiming {
pub build_cir_ms: u64,
pub static_pool_ms: u64,
pub write_cir_ms: u64,
pub total_ms: u64,
}
#[derive(Debug, thiserror::Error)]
pub enum CirExportError {
#[error("list repository_view entries failed: {0}")]
@ -42,6 +51,7 @@ pub struct CirExportSummary {
pub object_count: usize,
pub tal_count: usize,
pub static_pool: CirStaticPoolExportSummary,
pub timing: CirExportTiming,
}
pub fn build_cir_from_run(
@ -49,6 +59,7 @@ pub fn build_cir_from_run(
trust_anchor: &TrustAnchor,
tal_uri: &str,
validation_time: time::OffsetDateTime,
publication_points: &[PublicationPointAudit],
) -> Result<CanonicalInputRepresentation, CirExportError> {
if !(tal_uri.starts_with("https://") || tal_uri.starts_with("http://")) {
return Err(CirExportError::InvalidTalUri(tal_uri.to_string()));
@ -69,6 +80,26 @@ pub fn build_cir_from_run(
}
}
// CIR must describe the actual input world used by validation. When a
// publication point falls back to the latest validated current instance,
// repository_view may not contain the reused manifest/object set. Pull
// those object hashes from the audit so replay can reconstruct the same
// world state.
for pp in publication_points {
if pp.source != "vcir_current_instance" {
continue;
}
for obj in &pp.objects {
if obj.result != AuditObjectResult::Ok {
continue;
}
if !obj.rsync_uri.starts_with("rsync://") {
continue;
}
objects.insert(obj.rsync_uri.clone(), obj.sha256_hex.to_ascii_lowercase());
}
}
let ta_hash = ta_sha256_hex(&trust_anchor.ta_certificate.raw_der);
let mut saw_rsync_uri = false;
for uri in &trust_anchor.tal.ta_uris {
@ -149,17 +180,41 @@ pub fn export_cir_from_run(
trust_anchor: &TrustAnchor,
tal_uri: &str,
validation_time: time::OffsetDateTime,
publication_points: &[PublicationPointAudit],
cir_out: &Path,
static_root: &Path,
capture_date_utc: time::Date,
) -> Result<CirExportSummary, CirExportError> {
let cir = build_cir_from_run(store, trust_anchor, tal_uri, validation_time)?;
let total_started = std::time::Instant::now();
let started = std::time::Instant::now();
let cir = build_cir_from_run(
store,
trust_anchor,
tal_uri,
validation_time,
publication_points,
)?;
let build_cir_ms = started.elapsed().as_millis() as u64;
let started = std::time::Instant::now();
let static_pool = export_cir_static_pool(store, static_root, capture_date_utc, &cir, trust_anchor)?;
let static_pool_ms = started.elapsed().as_millis() as u64;
let started = std::time::Instant::now();
write_cir_file(cir_out, &cir)?;
let write_cir_ms = started.elapsed().as_millis() as u64;
Ok(CirExportSummary {
object_count: cir.objects.len(),
tal_count: cir.tals.len(),
static_pool,
timing: CirExportTiming {
build_cir_ms,
static_pool_ms,
write_cir_ms,
total_ms: total_started.elapsed().as_millis() as u64,
},
})
}
@ -222,7 +277,13 @@ mod tests {
.unwrap();
let ta = sample_trust_anchor();
let cir = build_cir_from_run(&store, &ta, "https://example.test/root.tal", sample_time())
let cir = build_cir_from_run(
&store,
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
)
.expect("build cir");
assert_eq!(cir.version, CIR_VERSION_V1);
assert_eq!(cir.tals.len(), 1);
@ -267,6 +328,7 @@ mod tests {
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
&cir_path,
&static_root,
sample_date(),
@ -282,4 +344,48 @@ mod tests {
let object_path = static_pool_path(&static_root, sample_date(), &hash).unwrap();
assert_eq!(std::fs::read(object_path).unwrap(), bytes);
}
#[test]
fn build_cir_from_run_includes_vcir_current_instance_objects_from_audit() {
    // CIR export must include objects that only appear in the audit trail of a
    // publication point whose source is "vcir_current_instance" (passed via the
    // new `&[pp]` argument), even when they are absent from the run store.
    let td = tempfile::tempdir().unwrap();
    let store = RocksStore::open(td.path()).unwrap();
    let ta = sample_trust_anchor();
    let mut pp = PublicationPointAudit {
        source: "vcir_current_instance".to_string(),
        ..PublicationPointAudit::default()
    };
    // One manifest and one ROA entry, both audited Ok; the sha256 values are
    // synthetic fixed-pattern digests, not real object hashes.
    pp.objects.push(crate::audit::ObjectAuditEntry {
        rsync_uri: "rsync://example.test/repo/fallback.mft".to_string(),
        sha256_hex: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_string(),
        kind: crate::audit::AuditObjectKind::Manifest,
        result: crate::audit::AuditObjectResult::Ok,
        detail: None,
    });
    pp.objects.push(crate::audit::ObjectAuditEntry {
        rsync_uri: "rsync://example.test/repo/fallback.roa".to_string(),
        sha256_hex: "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb".to_string(),
        kind: crate::audit::AuditObjectKind::Roa,
        result: crate::audit::AuditObjectResult::Ok,
        detail: None,
    });
    let cir = build_cir_from_run(
        &store,
        &ta,
        "https://example.test/root.tal",
        sample_time(),
        &[pp],
    )
    .expect("build cir");
    // Both audit-only objects must surface in the exported CIR object list.
    assert!(cir
        .objects
        .iter()
        .any(|item| item.rsync_uri == "rsync://example.test/repo/fallback.mft"));
    assert!(cir
        .objects
        .iter()
        .any(|item| item.rsync_uri == "rsync://example.test/repo/fallback.roa"));
}
}

View File

@ -2,6 +2,7 @@ pub mod decode;
pub mod encode;
pub mod materialize;
pub mod model;
pub mod sequence;
#[cfg(feature = "full")]
pub mod export;
#[cfg(feature = "full")]
@ -16,6 +17,7 @@ pub use materialize::{
pub use model::{
CIR_VERSION_V1, CirHashAlgorithm, CirObject, CirTal, CanonicalInputRepresentation,
};
pub use sequence::{CirSequenceManifest, CirSequenceStep, CirSequenceStepKind};
#[cfg(feature = "full")]
pub use export::{CirExportError, CirExportSummary, build_cir_from_run, export_cir_from_run, write_cir_file};
#[cfg(feature = "full")]

147
src/cir/sequence.rs Normal file
View File

@ -0,0 +1,147 @@
use serde::{Deserialize, Serialize};
/// Kind of a recorded CIR sequence step: a `Full` snapshot or an incremental
/// `Delta` recorded on top of an earlier step.
///
/// Variants serialize as `"full"` / `"delta"`, matching the `kind` values
/// written into `sequence.json` by the recording scripts.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CirSequenceStepKind {
    Full,
    Delta,
}

/// One step of a recorded CIR sequence. All paths are relative to the
/// sequence root directory.
///
/// The on-disk `sequence.json` uses camelCase keys (`stepId`, `cirPath`, …) —
/// the same convention the recording/replay scripts and the sequence
/// integration tests rely on — so the serde representation is renamed
/// accordingly.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CirSequenceStep {
    pub step_id: String,
    pub kind: CirSequenceStepKind,
    pub validation_time: String,
    pub cir_path: String,
    pub ccr_path: String,
    pub report_path: String,
    pub previous_step_id: Option<String>,
}

/// Top-level `sequence.json` manifest describing a recorded chain of one full
/// step followed by zero or more delta steps. `static_root` is the directory
/// (relative to the sequence root) that holds the shared static object pool.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CirSequenceManifest {
    pub version: u32,
    pub static_root: String,
    pub steps: Vec<CirSequenceStep>,
}
impl CirSequenceManifest {
    /// Validate the structural invariants of a sequence manifest:
    ///
    /// - `version` is non-zero and `static_root` is non-empty;
    /// - there is at least one step, each with a unique, non-empty `step_id`,
    ///   a non-empty `validation_time`, and non-empty output paths;
    /// - the first (and only the first) step is `Full` and carries no
    ///   `previous_step_id`;
    /// - every `Delta` step references a strictly earlier step.
    ///
    /// Returns `Ok(())` when all invariants hold, otherwise an `Err` with a
    /// human-readable description of the first violation found.
    pub fn validate(&self) -> Result<(), String> {
        if self.version == 0 {
            return Err("sequence.version must be positive".to_string());
        }
        if self.static_root.trim().is_empty() {
            return Err("sequence.static_root must not be empty".to_string());
        }
        if self.steps.is_empty() {
            return Err("sequence.steps must not be empty".to_string());
        }
        let mut previous_ids = std::collections::BTreeSet::new();
        for (idx, step) in self.steps.iter().enumerate() {
            if step.step_id.trim().is_empty() {
                return Err(format!("sequence.steps[{idx}].step_id must not be empty"));
            }
            if !previous_ids.insert(step.step_id.clone()) {
                return Err(format!("sequence.steps[{idx}].step_id must be unique"));
            }
            if step.validation_time.trim().is_empty() {
                return Err(format!(
                    "sequence.steps[{idx}].validation_time must not be empty"
                ));
            }
            if step.cir_path.trim().is_empty()
                || step.ccr_path.trim().is_empty()
                || step.report_path.trim().is_empty()
            {
                return Err(format!(
                    "sequence.steps[{idx}] output paths must not be empty"
                ));
            }
            match step.kind {
                CirSequenceStepKind::Full => {
                    if idx != 0 {
                        return Err("full step must be the first step".to_string());
                    }
                    if step.previous_step_id.is_some() {
                        return Err("full step must not reference previous_step_id".to_string());
                    }
                }
                CirSequenceStepKind::Delta => {
                    if idx == 0 {
                        return Err("delta step cannot be the first step".to_string());
                    }
                    let previous = step.previous_step_id.as_ref().ok_or_else(|| {
                        format!("sequence.steps[{idx}] delta step must set previous_step_id")
                    })?;
                    // `previous_ids` already contains this step's own id (it was
                    // inserted above for duplicate detection), so a self-reference
                    // would otherwise pass the membership test — reject it
                    // explicitly along with ids that only appear later or never.
                    if previous == &step.step_id || !previous_ids.contains(previous) {
                        return Err(format!(
                            "sequence.steps[{idx}] previous_step_id must reference an earlier step"
                        ));
                    }
                }
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::{CirSequenceManifest, CirSequenceStep, CirSequenceStepKind};

    /// Smallest valid chain: one full step followed by one delta referencing it.
    fn sample_manifest() -> CirSequenceManifest {
        let full_step = CirSequenceStep {
            step_id: "full".to_string(),
            kind: CirSequenceStepKind::Full,
            validation_time: "2026-04-09T00:00:00Z".to_string(),
            cir_path: "full/input.cir".to_string(),
            ccr_path: "full/result.ccr".to_string(),
            report_path: "full/report.json".to_string(),
            previous_step_id: None,
        };
        let delta_step = CirSequenceStep {
            step_id: "delta-001".to_string(),
            kind: CirSequenceStepKind::Delta,
            validation_time: "2026-04-09T00:10:00Z".to_string(),
            cir_path: "delta-001/input.cir".to_string(),
            ccr_path: "delta-001/result.ccr".to_string(),
            report_path: "delta-001/report.json".to_string(),
            previous_step_id: Some("full".to_string()),
        };
        CirSequenceManifest {
            version: 1,
            static_root: "static".to_string(),
            steps: vec![full_step, delta_step],
        }
    }

    #[test]
    fn sequence_manifest_validate_accepts_minimal_chain() {
        sample_manifest().validate().expect("valid sequence");
    }

    #[test]
    fn sequence_manifest_validate_rejects_bad_order_and_duplicates() {
        // Putting the delta step first must be rejected with an ordering error.
        let mut reordered = sample_manifest();
        reordered.steps.swap(0, 1);
        let err = reordered.validate().expect_err("full must be first");
        assert!(
            err.contains("delta step cannot be the first step")
                || err.contains("full step must be the first step")
        );

        // Reusing an earlier step_id must be rejected as a duplicate.
        let mut duplicated = sample_manifest();
        duplicated.steps[1].step_id = "full".to_string();
        let err = duplicated.validate().expect_err("duplicate id must fail");
        assert!(err.contains("must be unique"));
    }

    #[test]
    fn sequence_manifest_validate_rejects_missing_previous_reference() {
        // A delta referencing an id that never occurred must be rejected.
        let mut dangling = sample_manifest();
        dangling.steps[1].previous_step_id = Some("missing".to_string());
        let err = dangling.validate().expect_err("missing previous must fail");
        assert!(err.contains("previous_step_id"));
    }
}

View File

@ -22,6 +22,27 @@ use crate::validation::run_tree_from_tal::{
run_tree_from_tal_url_serial_audit_with_timing,
};
use crate::validation::tree::TreeRunConfig;
use serde::Serialize;
/// Per-stage wall-clock timings and aggregate download statistics for one run,
/// serialized as `stage-timing.json` next to the JSON report for offline
/// performance analysis (e.g. of the slow static-pool write noted in the
/// commit message).
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
struct RunStageTiming {
    // Duration of the validation phase.
    validation_ms: u64,
    // Time spent building the in-memory report.
    report_build_ms: u64,
    // Time writing report JSON; None when no --report-json path was given.
    report_write_ms: Option<u64>,
    // CCR build/write timings; None when --ccr-out was not requested.
    ccr_build_ms: Option<u64>,
    ccr_write_ms: Option<u64>,
    // CIR export timings from CirExportSummary; None when CIR export disabled.
    cir_build_cir_ms: Option<u64>,
    cir_static_pool_ms: Option<u64>,
    cir_write_cir_ms: Option<u64>,
    cir_total_ms: Option<u64>,
    // End-to-end duration of the run.
    total_ms: u64,
    // Number of publication points visited in this run.
    publication_points: usize,
    // Sum of per-publication-point repository sync durations.
    repo_sync_ms_total: u64,
    // Aggregate download counters split by transport (RRDP vs rsync).
    download_event_count: u64,
    rrdp_download_ms_total: u64,
    rsync_download_ms_total: u64,
    download_bytes_total: u64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CliArgs {
@ -655,6 +676,8 @@ pub fn run(argv: &[String]) -> Result<(), String> {
None
};
let total_started = std::time::Instant::now();
let validation_started = std::time::Instant::now();
let out = if delta_replay_mode {
let tal_path = args
.tal_path
@ -988,11 +1011,43 @@ pub fn run(argv: &[String]) -> Result<(), String> {
}
};
let validation_ms = validation_started.elapsed().as_millis() as u64;
if let Some((_out_dir, t)) = timing.as_ref() {
t.record_count("instances_processed", out.tree.instances_processed as u64);
t.record_count("instances_failed", out.tree.instances_failed as u64);
}
let publication_points = out.publication_points.len();
let repo_sync_ms_total: u64 = out
.publication_points
.iter()
.map(|pp| pp.repo_sync_duration_ms.unwrap_or(0))
.sum();
let download_event_count = out.download_stats.events_total;
let rrdp_download_ms_total: u64 = ["rrdp_notification", "rrdp_snapshot", "rrdp_delta"]
.iter()
.map(|key| {
out.download_stats
.by_kind
.get(*key)
.map(|item| item.duration_ms_total)
.unwrap_or(0)
})
.sum();
let rsync_download_ms_total = out
.download_stats
.by_kind
.get("rsync")
.map(|item| item.duration_ms_total)
.unwrap_or(0);
let download_bytes_total: u64 = out
.download_stats
.by_kind
.values()
.map(|item| item.bytes_total.unwrap_or(0))
.sum();
#[cfg(feature = "profile")]
let profiler_report = if let Some(guard) = profiler_guard.take() {
Some(
@ -1005,7 +1060,10 @@ pub fn run(argv: &[String]) -> Result<(), String> {
None
};
let mut ccr_build_ms = None;
let mut ccr_write_ms = None;
if let Some(path) = args.ccr_out_path.as_deref() {
let started = std::time::Instant::now();
let ccr = build_ccr_from_run(
&store,
&[out.discovery.trust_anchor.clone()],
@ -1015,10 +1073,17 @@ pub fn run(argv: &[String]) -> Result<(), String> {
time::OffsetDateTime::now_utc(),
)
.map_err(|e| e.to_string())?;
ccr_build_ms = Some(started.elapsed().as_millis() as u64);
let started = std::time::Instant::now();
write_ccr_file(path, &ccr).map_err(|e| e.to_string())?;
ccr_write_ms = Some(started.elapsed().as_millis() as u64);
eprintln!("wrote CCR: {}", path.display());
}
let mut cir_build_cir_ms = None;
let mut cir_static_pool_ms = None;
let mut cir_write_cir_ms = None;
let mut cir_total_ms = None;
if args.cir_enabled {
let cir_tal_uri = args
.tal_url
@ -1038,25 +1103,66 @@ pub fn run(argv: &[String]) -> Result<(), String> {
&out.discovery.trust_anchor,
&cir_tal_uri,
validation_time,
&out.publication_points,
cir_out_path,
cir_static_root,
time::OffsetDateTime::now_utc().date(),
)
.map_err(|e| e.to_string())?;
cir_build_cir_ms = Some(summary.timing.build_cir_ms);
cir_static_pool_ms = Some(summary.timing.static_pool_ms);
cir_write_cir_ms = Some(summary.timing.write_cir_ms);
cir_total_ms = Some(summary.timing.total_ms);
eprintln!(
"wrote CIR: {} (objects={}, tals={}, static_written={}, static_reused={})",
"wrote CIR: {} (objects={}, tals={}, static_written={}, static_reused={}, build_cir_ms={}, static_pool_ms={}, write_cir_ms={}, total_ms={})",
cir_out_path.display(),
summary.object_count,
summary.tal_count,
summary.static_pool.written_files,
summary.static_pool.reused_files
summary.static_pool.reused_files,
summary.timing.build_cir_ms,
summary.timing.static_pool_ms,
summary.timing.write_cir_ms,
summary.timing.total_ms
);
}
let report_started = std::time::Instant::now();
let report = build_report(&policy, validation_time, out);
let report_build_ms = report_started.elapsed().as_millis() as u64;
let mut report_write_ms = None;
if let Some(p) = args.report_json_path.as_deref() {
let started = std::time::Instant::now();
write_json(p, &report)?;
report_write_ms = Some(started.elapsed().as_millis() as u64);
if let Some(parent) = p.parent() {
let stage_timing = RunStageTiming {
validation_ms,
report_build_ms,
report_write_ms,
ccr_build_ms,
ccr_write_ms,
cir_build_cir_ms,
cir_static_pool_ms,
cir_write_cir_ms,
cir_total_ms,
total_ms: total_started.elapsed().as_millis() as u64,
publication_points,
repo_sync_ms_total,
download_event_count,
rrdp_download_ms_total,
rsync_download_ms_total,
download_bytes_total,
};
let stage_timing_path = parent.join("stage-timing.json");
std::fs::write(
&stage_timing_path,
serde_json::to_vec_pretty(&stage_timing).map_err(|e| e.to_string())?,
)
.map_err(|e| format!("write stage timing failed: {}: {e}", stage_timing_path.display()))?;
eprintln!("analysis: wrote {}", stage_timing_path.display());
}
}
if let Some((out_dir, t)) = timing.as_ref() {

View File

@ -0,0 +1,230 @@
use std::collections::BTreeSet;
use std::path::PathBuf;
use std::process::Command;
use rpki::ccr::{encode_content_info, CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation, TrustAnchorState};
use rpki::cir::{encode_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
#[test]
fn cir_full_and_delta_pair_reuses_shared_static_pool() {
    // End-to-end check of the full+delta record script: a stub `rpki` binary
    // stands in for the real one, and the assertion is that both steps land
    // their objects in ONE shared static pool (deduplicated by content hash).
    let script = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_record_full_delta.sh");
    let out_dir = tempfile::tempdir().expect("tempdir");
    let out = out_dir.path().join("cir-pair");
    let fixture_root = out_dir.path().join("fixture");
    std::fs::create_dir_all(&fixture_root).unwrap();
    let static_payload_root = fixture_root.join("payloads");
    std::fs::create_dir_all(&static_payload_root).unwrap();
    // Lock files pin distinct validation times for the base and delta runs.
    let base_locks = fixture_root.join("base-locks.json");
    let delta_locks = fixture_root.join("locks-delta.json");
    std::fs::write(
        &base_locks,
        br#"{"validationTime":"2026-03-16T11:49:15Z"}"#,
    )
    .unwrap();
    std::fs::write(
        &delta_locks,
        br#"{"validationTime":"2026-03-16T11:50:15Z"}"#,
    )
    .unwrap();
    // Content hashes of the two synthetic payload objects.
    let full_obj_hash = {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(b"full-object"))
    };
    let delta_obj_hash = {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(b"delta-object"))
    };
    // Full CIR references one object; the delta CIR references that same
    // object plus a new one, sorted by rsync URI as the format requires.
    let full_cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: time::OffsetDateTime::parse(
            "2026-03-16T11:49:15Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap(),
        objects: vec![CirObject {
            rsync_uri: "rsync://example.net/repo/full.roa".to_string(),
            sha256: hex::decode(&full_obj_hash).unwrap(),
        }],
        tals: vec![CirTal {
            tal_uri: "https://rpki.apnic.net/tal/apnic-rfc7730-https.tal".to_string(),
            tal_bytes: b"rsync://example.net/repo/root.cer\nMIIB".to_vec(),
        }],
    };
    let delta_cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: time::OffsetDateTime::parse(
            "2026-03-16T11:50:15Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap(),
        objects: {
            let mut objects = vec![
                CirObject {
                    rsync_uri: "rsync://example.net/repo/full.roa".to_string(),
                    sha256: hex::decode(&full_obj_hash).unwrap(),
                },
                CirObject {
                    rsync_uri: "rsync://example.net/repo/delta.roa".to_string(),
                    sha256: hex::decode(&delta_obj_hash).unwrap(),
                },
            ];
            objects.sort_by(|a, b| a.rsync_uri.cmp(&b.rsync_uri));
            objects
        },
        tals: full_cir.tals.clone(),
    };
    // Minimal CCR with only trust-anchor state; both steps reuse it.
    let empty_ccr = CcrContentInfo::new(RpkiCanonicalCacheRepresentation {
        version: 0,
        hash_alg: CcrDigestAlgorithm::Sha256,
        produced_at: full_cir.validation_time,
        mfts: None,
        vrps: None,
        vaps: None,
        tas: Some(TrustAnchorState { skis: vec![vec![0x11; 20]], hash: vec![0x22; 32] }),
        rks: None,
    });
    let full_cir_path = fixture_root.join("full.cir");
    let delta_cir_path = fixture_root.join("delta.cir");
    let full_ccr_path = fixture_root.join("full.ccr");
    let delta_ccr_path = fixture_root.join("delta.ccr");
    let full_report_path = fixture_root.join("full-report.json");
    let delta_report_path = fixture_root.join("delta-report.json");
    std::fs::write(&full_cir_path, encode_cir(&full_cir).unwrap()).unwrap();
    std::fs::write(&delta_cir_path, encode_cir(&delta_cir).unwrap()).unwrap();
    std::fs::write(&full_ccr_path, encode_content_info(&empty_ccr).unwrap()).unwrap();
    std::fs::write(&delta_ccr_path, encode_content_info(&empty_ccr).unwrap()).unwrap();
    std::fs::write(&full_report_path, br#"{"format_version":2,"publication_points":[]}"#).unwrap();
    std::fs::write(&delta_report_path, br#"{"format_version":2,"publication_points":[]}"#).unwrap();
    // Stub `rpki` binary: detects full vs delta mode from which archive flag is
    // present, then copies the prepared fixtures to the requested output paths
    // and installs payloads into the static pool. `{{ab,cd,ef}}` is a literal
    // bash brace expansion ({{ }} escapes format! braces).
    let stub = out_dir.path().join("stub-rpki.sh");
    std::fs::write(
        &stub,
        format!(
            r#"#!/usr/bin/env bash
set -euo pipefail
MODE=""
cir=""
ccr=""
report=""
static_root=""
while [[ $# -gt 0 ]]; do
case "$1" in
--payload-replay-archive) MODE="full"; shift 2 ;;
--payload-base-archive) MODE="delta"; shift 2 ;;
--cir-out) cir="$2"; shift 2 ;;
--ccr-out) ccr="$2"; shift 2 ;;
--report-json) report="$2"; shift 2 ;;
--cir-static-root) static_root="$2"; shift 2 ;;
*) shift ;;
esac
done
mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$static_root/20260316/{{ab,cd,ef}}/00"
if [[ "$MODE" == "full" ]]; then
cp "{full_cir}" "$cir"
cp "{full_ccr}" "$ccr"
cp "{full_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
else
cp "{delta_cir}" "$cir"
cp "{delta_ccr}" "$ccr"
cp "{delta_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
install -D -m 0644 "{payload_root}/delta-object" "$static_root/20260316/ef/00/{delta_hash}"
fi
"#,
            full_cir = full_cir_path.display(),
            delta_cir = delta_cir_path.display(),
            full_ccr = full_ccr_path.display(),
            delta_ccr = delta_ccr_path.display(),
            full_report = full_report_path.display(),
            delta_report = delta_report_path.display(),
            payload_root = static_payload_root.display(),
            full_hash = full_obj_hash,
            delta_hash = delta_obj_hash,
        ),
    )
    .unwrap();
    std::fs::set_permissions(&stub, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();
    std::fs::write(static_payload_root.join("full-object"), b"full-object").unwrap();
    std::fs::write(static_payload_root.join("delta-object"), b"delta-object").unwrap();
    // Drive the record script with the stub standing in for the real binary.
    let proc = Command::new(script)
        .args([
            "--out-dir",
            out.to_string_lossy().as_ref(),
            "--tal-path",
            PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                .join("tests/fixtures/tal/apnic-rfc7730-https.tal")
                .to_string_lossy()
                .as_ref(),
            "--ta-path",
            PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                .join("tests/fixtures/ta/apnic-ta.cer")
                .to_string_lossy()
                .as_ref(),
            "--cir-tal-uri",
            "https://rpki.apnic.net/tal/apnic-rfc7730-https.tal",
            "--payload-replay-archive",
            "/tmp/base-payload-archive",
            "--payload-replay-locks",
            base_locks.to_string_lossy().as_ref(),
            "--payload-base-archive",
            "/tmp/base-payload-archive",
            "--payload-base-locks",
            base_locks.to_string_lossy().as_ref(),
            "--payload-delta-archive",
            "/tmp/payload-delta-archive",
            "--payload-delta-locks",
            delta_locks.to_string_lossy().as_ref(),
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--rpki-bin",
            stub.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run cir record pair");
    assert!(
        proc.status.success(),
        "stderr={}",
        String::from_utf8_lossy(&proc.stderr)
    );
    // Both step CIRs must decode; collect the union of their object hashes.
    let full_cir = rpki::cir::decode_cir(&std::fs::read(out.join("full").join("input.cir")).unwrap())
        .expect("decode full cir");
    let delta_cir =
        rpki::cir::decode_cir(&std::fs::read(out.join("delta-001").join("input.cir")).unwrap())
            .expect("decode delta cir");
    let mut hashes = BTreeSet::new();
    for item in &full_cir.objects {
        hashes.insert(hex::encode(&item.sha256));
    }
    for item in &delta_cir.objects {
        hashes.insert(hex::encode(&item.sha256));
    }
    // Dedup check: the shared pool holds exactly one file per distinct hash —
    // the object common to both steps must not be stored twice.
    let static_file_count = walk(out.join("static")).len();
    assert_eq!(static_file_count, hashes.len());
    assert!(out.join("summary.json").is_file());
    assert!(out.join("full").join("result.ccr").is_file());
    assert!(out.join("delta-001").join("result.ccr").is_file());
}
/// Recursively collect every regular file at or below `path`.
/// Non-existent paths yield an empty list; directory read errors panic
/// (acceptable in this test helper).
fn walk(path: std::path::PathBuf) -> Vec<std::path::PathBuf> {
    if path.is_file() {
        return vec![path];
    }
    let mut files = Vec::new();
    if path.is_dir() {
        for entry in std::fs::read_dir(&path).unwrap() {
            files.extend(walk(entry.unwrap().path()));
        }
    }
    files
}

View File

@ -0,0 +1,111 @@
use std::path::PathBuf;
use std::process::Command;
use rpki::ccr::{
encode_content_info, CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation,
TrustAnchorState,
};
use rpki::cir::{encode_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
#[test]
fn cir_drop_report_counts_dropped_roa_objects_and_vrps() {
    // Exercises the `cir_drop_report` binary: given a CIR listing one ROA, an
    // (empty) CCR, and a report marking that ROA as rejected, the drop report
    // must count one dropped object of kind "roa" and at least one dropped VRP.
    let td = tempfile::tempdir().expect("tempdir");
    let cir_path = td.path().join("input.cir");
    let ccr_path = td.path().join("output.ccr");
    let report_path = td.path().join("report.json");
    let static_root = td.path().join("static");
    let json_out = td.path().join("drop.json");
    let md_out = td.path().join("drop.md");
    // Use a real ROA fixture so the tool can parse VRPs out of it.
    let roa_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/AS4538.roa");
    let roa_bytes = std::fs::read(&roa_path).expect("read roa fixture");
    let hash = {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(&roa_bytes))
    };
    // Seed the static pool with the object at <root>/<date>/<h0..2>/<h2..4>/<hash>.
    let dir = static_root.join("20260409").join(&hash[0..2]).join(&hash[2..4]);
    std::fs::create_dir_all(&dir).unwrap();
    std::fs::write(dir.join(&hash), &roa_bytes).unwrap();
    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: time::OffsetDateTime::parse(
            "2026-04-09T00:00:00Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap(),
        objects: vec![CirObject {
            rsync_uri: "rsync://example.net/repo/AS4538.roa".to_string(),
            sha256: hex::decode(&hash).unwrap(),
        }],
        tals: vec![CirTal {
            tal_uri: "https://example.test/root.tal".to_string(),
            tal_bytes: b"rsync://example.net/repo/root.cer\nMIIB".to_vec(),
        }],
    };
    std::fs::write(&cir_path, encode_cir(&cir).unwrap()).unwrap();
    // CCR with no VRPs at all — so the ROA's VRPs count as dropped.
    let ccr = CcrContentInfo::new(RpkiCanonicalCacheRepresentation {
        version: 0,
        hash_alg: CcrDigestAlgorithm::Sha256,
        produced_at: cir.validation_time,
        mfts: None,
        vrps: None,
        vaps: None,
        tas: Some(TrustAnchorState {
            skis: vec![vec![0x11; 20]],
            hash: vec![0x22; 32],
        }),
        rks: None,
    });
    std::fs::write(&ccr_path, encode_content_info(&ccr).unwrap()).unwrap();
    // Report marks the ROA as rejected ("error") with a reason string.
    let report = serde_json::json!({
        "format_version": 2,
        "publication_points": [
            {
                "manifest_rsync_uri": "rsync://example.net/repo/example.mft",
                "publication_point_rsync_uri": "rsync://example.net/repo/",
                "objects": [
                    {
                        "rsync_uri": "rsync://example.net/repo/AS4538.roa",
                        "sha256_hex": hash,
                        "kind": "roa",
                        "result": "error",
                        "detail": "policy rejected"
                    }
                ]
            }
        ]
    });
    std::fs::write(&report_path, serde_json::to_vec_pretty(&report).unwrap()).unwrap();
    let bin = env!("CARGO_BIN_EXE_cir_drop_report");
    let out = Command::new(bin)
        .args([
            "--cir",
            cir_path.to_string_lossy().as_ref(),
            "--ccr",
            ccr_path.to_string_lossy().as_ref(),
            "--report-json",
            report_path.to_string_lossy().as_ref(),
            "--static-root",
            static_root.to_string_lossy().as_ref(),
            "--json-out",
            json_out.to_string_lossy().as_ref(),
            "--md-out",
            md_out.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run cir_drop_report");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    // JSON summary: one dropped object, of kind roa, with >= 1 dropped VRP.
    let output: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&json_out).unwrap()).unwrap();
    assert_eq!(output["summary"]["droppedObjectCount"], 1);
    assert!(output["summary"]["droppedVrpCount"].as_u64().unwrap_or(0) >= 1);
    assert_eq!(output["summary"]["droppedByKind"]["roa"], 1);
    // Markdown output must include the per-reason breakdown section.
    assert!(std::fs::read_to_string(&md_out).unwrap().contains("Dropped By Reason"));
}

View File

@ -0,0 +1,212 @@
use std::path::PathBuf;
use std::process::Command;
use rpki::ccr::{encode_content_info, CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation, TrustAnchorState};
use rpki::cir::{encode_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
#[test]
fn cir_offline_sequence_writes_parseable_sequence_json_and_steps() {
    // Drives the offline sequence-record script with a stub `rpki` binary and
    // --delta-count 2, then asserts sequence.json lists full + delta-001 +
    // delta-002 and every step directory has its three output files.
    let out_dir = tempfile::tempdir().expect("tempdir");
    let out = out_dir.path().join("cir-sequence");
    let script =
        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_record_sequence_offline.sh");
    let fixture_root = out_dir.path().join("fixture");
    let static_payload_root = fixture_root.join("payloads");
    std::fs::create_dir_all(&static_payload_root).unwrap();
    // Lock files pin distinct validation times for base and delta runs.
    let base_locks = fixture_root.join("base-locks.json");
    let delta_locks = fixture_root.join("locks-delta.json");
    std::fs::write(&base_locks, br#"{"validationTime":"2026-03-16T11:49:15Z"}"#).unwrap();
    std::fs::write(&delta_locks, br#"{"validationTime":"2026-03-16T11:50:15Z"}"#).unwrap();
    // Helper building a single-object CIR for the given URI/hash/time.
    let mk_cir = |uri: &str, hash_hex: &str, vt: &str| CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: time::OffsetDateTime::parse(vt, &time::format_description::well_known::Rfc3339).unwrap(),
        objects: vec![CirObject {
            rsync_uri: uri.to_string(),
            sha256: hex::decode(hash_hex).unwrap(),
        }],
        tals: vec![CirTal {
            tal_uri: "https://rpki.apnic.net/tal/apnic-rfc7730-https.tal".to_string(),
            tal_bytes: b"rsync://example.net/repo/root.cer\nMIIB".to_vec(),
        }],
    };
    let full_hash = {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(b"full-object"))
    };
    let delta_hash = {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(b"delta-object"))
    };
    // The delta CIR contains the full step's object plus one new object,
    // sorted by rsync URI.
    let full_cir = mk_cir("rsync://example.net/repo/full.roa", &full_hash, "2026-03-16T11:49:15Z");
    let delta_cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: time::OffsetDateTime::parse(
            "2026-03-16T11:50:15Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap(),
        objects: {
            let mut objects = vec![
                full_cir.objects[0].clone(),
                CirObject {
                    rsync_uri: "rsync://example.net/repo/delta.roa".to_string(),
                    sha256: hex::decode(&delta_hash).unwrap(),
                },
            ];
            objects.sort_by(|a, b| a.rsync_uri.cmp(&b.rsync_uri));
            objects
        },
        tals: full_cir.tals.clone(),
    };
    // Minimal CCR (trust-anchor state only) reused by every step.
    let empty_ccr = CcrContentInfo::new(RpkiCanonicalCacheRepresentation {
        version: 0,
        hash_alg: CcrDigestAlgorithm::Sha256,
        produced_at: full_cir.validation_time,
        mfts: None,
        vrps: None,
        vaps: None,
        tas: Some(TrustAnchorState { skis: vec![vec![0x11; 20]], hash: vec![0x22; 32] }),
        rks: None,
    });
    let full_cir_path = fixture_root.join("full.cir");
    let delta_cir_path = fixture_root.join("delta.cir");
    let full_ccr_path = fixture_root.join("full.ccr");
    let delta_ccr_path = fixture_root.join("delta.ccr");
    let full_report_path = fixture_root.join("full-report.json");
    let delta_report_path = fixture_root.join("delta-report.json");
    std::fs::create_dir_all(&fixture_root).unwrap();
    std::fs::write(&full_cir_path, encode_cir(&full_cir).unwrap()).unwrap();
    std::fs::write(&delta_cir_path, encode_cir(&delta_cir).unwrap()).unwrap();
    std::fs::write(&full_ccr_path, encode_content_info(&empty_ccr).unwrap()).unwrap();
    std::fs::write(&delta_ccr_path, encode_content_info(&empty_ccr).unwrap()).unwrap();
    std::fs::write(&full_report_path, br#"{"format_version":2,"publication_points":[]}"#).unwrap();
    std::fs::write(&delta_report_path, br#"{"format_version":2,"publication_points":[]}"#).unwrap();
    std::fs::write(static_payload_root.join("full-object"), b"full-object").unwrap();
    std::fs::write(static_payload_root.join("delta-object"), b"delta-object").unwrap();
    // Stub `rpki` binary: picks full vs delta mode from which archive flag is
    // present and copies prepared fixtures to the requested output paths.
    // NOTE(review): both arms of the `*delta-001*` if/else below copy the same
    // files — presumably a placeholder for per-delta fixtures; confirm intent.
    let stub = out_dir.path().join("stub-rpki.sh");
    std::fs::write(
        &stub,
        format!(
            r#"#!/usr/bin/env bash
set -euo pipefail
MODE=""
cir=""
ccr=""
report=""
static_root=""
while [[ $# -gt 0 ]]; do
case "$1" in
--payload-replay-archive) MODE="full"; shift 2 ;;
--payload-base-archive) MODE="delta"; shift 2 ;;
--cir-out) cir="$2"; shift 2 ;;
--ccr-out) ccr="$2"; shift 2 ;;
--report-json) report="$2"; shift 2 ;;
--cir-static-root) static_root="$2"; shift 2 ;;
*) shift ;;
esac
done
mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$static_root/20260316/{{ab,cd,ef}}/00"
if [[ "$MODE" == "full" ]]; then
cp "{full_cir}" "$cir"
cp "{full_ccr}" "$ccr"
cp "{full_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
else
if [[ "$cir" == *delta-001* ]]; then
cp "{delta_cir}" "$cir"
cp "{delta_ccr}" "$ccr"
cp "{delta_report}" "$report"
else
cp "{delta_cir}" "$cir"
cp "{delta_ccr}" "$ccr"
cp "{delta_report}" "$report"
fi
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
install -D -m 0644 "{payload_root}/delta-object" "$static_root/20260316/ef/00/{delta_hash}"
fi
"#,
            full_cir = full_cir_path.display(),
            delta_cir = delta_cir_path.display(),
            full_ccr = full_ccr_path.display(),
            delta_ccr = delta_ccr_path.display(),
            full_report = full_report_path.display(),
            delta_report = delta_report_path.display(),
            payload_root = static_payload_root.display(),
            full_hash = full_hash,
            delta_hash = delta_hash,
        ),
    )
    .unwrap();
    std::fs::set_permissions(&stub, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();
    // Run the sequence script asking for two delta steps.
    let proc = Command::new(script)
        .args([
            "--out-dir",
            out.to_string_lossy().as_ref(),
            "--tal-path",
            PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                .join("tests/fixtures/tal/apnic-rfc7730-https.tal")
                .to_string_lossy()
                .as_ref(),
            "--ta-path",
            PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                .join("tests/fixtures/ta/apnic-ta.cer")
                .to_string_lossy()
                .as_ref(),
            "--cir-tal-uri",
            "https://rpki.apnic.net/tal/apnic-rfc7730-https.tal",
            "--payload-replay-archive",
            "/tmp/base-payload-archive",
            "--payload-replay-locks",
            base_locks.to_string_lossy().as_ref(),
            "--payload-base-archive",
            "/tmp/base-payload-archive",
            "--payload-base-locks",
            base_locks.to_string_lossy().as_ref(),
            "--payload-delta-archive",
            "/tmp/payload-delta-archive",
            "--payload-delta-locks",
            delta_locks.to_string_lossy().as_ref(),
            "--delta-count",
            "2",
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--rpki-bin",
            stub.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run cir sequence script");
    assert!(
        proc.status.success(),
        "stderr={}",
        String::from_utf8_lossy(&proc.stderr)
    );
    // sequence.json uses camelCase keys; all three steps must be listed in order.
    let sequence_json: serde_json::Value =
        serde_json::from_slice(&std::fs::read(out.join("sequence.json")).unwrap()).unwrap();
    let steps = sequence_json["steps"].as_array().expect("steps");
    assert_eq!(steps.len(), 3);
    assert_eq!(steps[0]["stepId"], "full");
    assert_eq!(steps[1]["stepId"], "delta-001");
    assert_eq!(steps[2]["stepId"], "delta-002");
    // Each step directory must contain the CIR, CCR, and report outputs.
    for rel in [
        "full/input.cir",
        "full/result.ccr",
        "full/report.json",
        "delta-001/input.cir",
        "delta-001/result.ccr",
        "delta-001/report.json",
        "delta-002/input.cir",
        "delta-002/result.ccr",
        "delta-002/report.json",
    ] {
        assert!(out.join(rel).is_file(), "missing {}", rel);
    }
}

View File

@ -0,0 +1,176 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use rpki::cir::{encode_cir, materialize_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
/// Path to the bundled APNIC RFC 7730 HTTPS TAL fixture, rooted at the crate
/// manifest directory.
fn apnic_tal_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("tests/fixtures/tal/apnic-rfc7730-https.tal")
}
/// Path to the bundled APNIC trust-anchor certificate fixture, rooted at the
/// crate manifest directory.
fn apnic_ta_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("tests/fixtures/ta/apnic-ta.cer")
}
/// Build a CIR whose only object is the APNIC trust-anchor certificate.
/// Returns the CIR together with the raw TA bytes so callers can seed the
/// static pool with the same content.
fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
    use sha2::{Digest, Sha256};

    let tal_bytes = std::fs::read(apnic_tal_path()).expect("read tal");
    let ta_bytes = std::fs::read(apnic_ta_path()).expect("read ta");
    // The CIR object entry is keyed by the TA's rsync URI taken from the TAL.
    let tal = rpki::data_model::tal::Tal::decode_bytes(&tal_bytes).expect("decode tal");
    let ta_rsync_uri = tal
        .ta_uris
        .iter()
        .find(|uri| uri.scheme() == "rsync")
        .expect("tal has rsync uri")
        .as_str()
        .to_string();
    let ta_hash = Sha256::digest(&ta_bytes).to_vec();
    let validation_time = time::OffsetDateTime::parse(
        "2026-04-07T00:00:00Z",
        &time::format_description::well_known::Rfc3339,
    )
    .unwrap();
    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time,
        objects: vec![CirObject {
            rsync_uri: ta_rsync_uri,
            sha256: ta_hash,
        }],
        tals: vec![CirTal {
            tal_uri: "https://example.test/root.tal".to_string(),
            tal_bytes,
        }],
    };
    (cir, ta_bytes)
}
/// Store `bytes` in the static pool using the content-addressed layout
/// `<root>/<date>/<hash[0..2]>/<hash[2..4]>/<hash>` (hash = hex SHA-256).
fn write_static(root: &Path, date: &str, bytes: &[u8]) {
    use sha2::{Digest, Sha256};
    let digest_hex = hex::encode(Sha256::digest(bytes));
    let (level1, level2) = (&digest_hex[0..2], &digest_hex[2..4]);
    let target_dir = root.join(date).join(level1).join(level2);
    std::fs::create_dir_all(&target_dir).expect("mkdir static");
    std::fs::write(target_dir.join(&digest_hex), bytes).expect("write static object");
}
/// Run the `rpki` binary once against the CIR's mirrored objects (served via
/// the rsync wrapper script instead of the network) and return the path of
/// the reference CCR it produced.
fn prepare_reference_ccr(work: &Path, cir: &CanonicalInputRepresentation, mirror_root: &Path) -> PathBuf {
    let reference_ccr = work.join("reference.ccr");
    let rpki_bin = env!("CARGO_BIN_EXE_rpki");
    // Wrapper redirects rsync URIs to the local mirror; env vars configure it.
    let wrapper = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/cir-rsync-wrapper");
    let tal_path = apnic_tal_path();
    let ta_path = apnic_ta_path();
    let out = Command::new(rpki_bin)
        .env("REAL_RSYNC_BIN", "/usr/bin/rsync")
        .env("CIR_MIRROR_ROOT", mirror_root)
        .env("CIR_LOCAL_LINK_MODE", "1")
        .args([
            "--db",
            work.join("reference-db").to_string_lossy().as_ref(),
            "--tal-path",
            tal_path.to_string_lossy().as_ref(),
            "--ta-path",
            ta_path.to_string_lossy().as_ref(),
            // RRDP disabled so only the wrapped rsync transport is exercised.
            "--disable-rrdp",
            "--rsync-command",
            wrapper.to_string_lossy().as_ref(),
            // Pin validation time to the CIR's recorded time for reproducibility.
            "--validation-time",
            &cir.validation_time.format(&time::format_description::well_known::Rfc3339).unwrap(),
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--ccr-out",
            reference_ccr.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run reference rpki");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    reference_ccr
}
/// Assemble a three-step sequence root (full + delta-001 + delta-002) in which
/// every step replays the same TA-only CIR, shares one static pool, and uses
/// an identical reference CCR as its expected result. Returns the root path.
fn prepare_sequence_root(td: &Path) -> PathBuf {
    let sequence_root = td.join("sequence");
    let static_root = sequence_root.join("static");
    let mirror_root = td.join("mirror");
    std::fs::create_dir_all(sequence_root.join("full")).unwrap();
    std::fs::create_dir_all(sequence_root.join("delta-001")).unwrap();
    std::fs::create_dir_all(sequence_root.join("delta-002")).unwrap();
    let (cir, ta_bytes) = build_ta_only_cir();
    let cir_bytes = encode_cir(&cir).expect("encode cir");
    // All three steps carry the identical CIR payload.
    std::fs::write(sequence_root.join("full").join("input.cir"), &cir_bytes).unwrap();
    std::fs::write(sequence_root.join("delta-001").join("input.cir"), &cir_bytes).unwrap();
    std::fs::write(sequence_root.join("delta-002").join("input.cir"), &cir_bytes).unwrap();
    // Seed the pool with the TA object and mirror it for the rsync wrapper.
    write_static(&static_root, "20260407", &ta_bytes);
    materialize_cir(&cir, &static_root, &mirror_root, true).unwrap();
    // One reference CCR, copied as each step's expected result.
    let reference = prepare_reference_ccr(td, &cir, &mirror_root);
    std::fs::copy(&reference, sequence_root.join("full").join("result.ccr")).unwrap();
    std::fs::copy(&reference, sequence_root.join("delta-001").join("result.ccr")).unwrap();
    std::fs::copy(&reference, sequence_root.join("delta-002").join("result.ccr")).unwrap();
    std::fs::write(sequence_root.join("full").join("report.json"), b"{}").unwrap();
    std::fs::write(sequence_root.join("delta-001").join("report.json"), b"{}").unwrap();
    std::fs::write(sequence_root.join("delta-002").join("report.json"), b"{}").unwrap();
    // sequence.json manifest in the camelCase on-disk format.
    let sequence = serde_json::json!({
        "version": 1,
        "staticRoot": "static",
        "steps": [
            {"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null},
            {"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"},
            {"stepId":"delta-002","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-002/input.cir","ccrPath":"delta-002/result.ccr","reportPath":"delta-002/report.json","previousStepId":"delta-001"}
        ]
    });
    std::fs::write(sequence_root.join("sequence.json"), serde_json::to_vec_pretty(&sequence).unwrap()).unwrap();
    sequence_root
}
#[test]
fn peer_sequence_replay_scripts_replay_all_steps() {
    // Replays the prepared sequence through both peer validators and checks
    // their per-step summaries all match the reference CCR.
    //
    // The peer tool locations were hard-coded to one developer machine; they
    // are now overridable via env vars, with the old paths kept as defaults so
    // existing setups behave exactly as before. The test skips (returns early)
    // when the required external tooling is absent.
    let routinator_bin = std::env::var("CIR_ROUTINATOR_BIN").unwrap_or_else(|_| {
        "/home/yuyr/dev/rust_playground/routinator/target/debug/routinator".to_string()
    });
    let rpki_client_build_dir = std::env::var("CIR_RPKI_CLIENT_BUILD_DIR")
        .unwrap_or_else(|_| "/home/yuyr/dev/rpki-client-9.7/build-m5".to_string());
    // NOTE(review): the routinator replay script is assumed to resolve the same
    // binary probed here — confirm before relying on CIR_ROUTINATOR_BIN overrides.
    if !Path::new("/usr/bin/rsync").exists()
        || !Path::new(&routinator_bin).exists()
        || !Path::new(&rpki_client_build_dir).join("src/rpki-client").exists()
    {
        // Required peer tooling missing; treat as a skip.
        return;
    }
    let td = tempfile::tempdir().expect("tempdir");
    let sequence_root = prepare_sequence_root(td.path());
    // Routinator replay: the script writes sequence-summary-routinator.json.
    let routinator_script = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("scripts/cir/run_cir_replay_sequence_routinator.sh");
    let out = Command::new(routinator_script)
        .args([
            "--sequence-root",
            sequence_root.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run routinator sequence replay");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    let r_summary: serde_json::Value = serde_json::from_slice(
        &std::fs::read(sequence_root.join("sequence-summary-routinator.json")).unwrap(),
    )
    .unwrap();
    assert_eq!(r_summary["stepCount"], 3);
    assert_eq!(r_summary["allMatch"], true);
    // rpki-client replay: the build dir now comes from the (overridable) setting
    // above instead of a second hard-coded literal.
    let rpki_client_script = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("scripts/cir/run_cir_replay_sequence_rpki_client.sh");
    let out = Command::new(rpki_client_script)
        .args([
            "--sequence-root",
            sequence_root.to_string_lossy().as_ref(),
            "--build-dir",
            rpki_client_build_dir.as_str(),
        ])
        .output()
        .expect("run rpki-client sequence replay");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    let c_summary: serde_json::Value = serde_json::from_slice(
        &std::fs::read(sequence_root.join("sequence-summary-rpki-client.json")).unwrap(),
    )
    .unwrap();
    assert_eq!(c_summary["stepCount"], 3);
    assert_eq!(c_summary["allMatch"], true);
}

View File

@ -0,0 +1,155 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use rpki::cir::{encode_cir, materialize_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1};
fn apnic_tal_path() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/tal/apnic-rfc7730-https.tal")
}
fn apnic_ta_path() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/ta/apnic-ta.cer")
}
/// Builds a minimal CIR containing exactly one object (the APNIC TA cert) and
/// one TAL (the fixture TAL re-homed under a synthetic https URI).
/// Returns the CIR together with the raw TA bytes so callers can seed the
/// static pool with the same content.
fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
    use sha2::{Digest, Sha256};
    let tal_bytes = std::fs::read(apnic_tal_path()).expect("read tal");
    let ta_bytes = std::fs::read(apnic_ta_path()).expect("read ta");
    let tal = rpki::data_model::tal::Tal::decode_bytes(&tal_bytes).expect("decode tal");
    // Pick the first rsync URI out of the TAL; the CIR object is keyed by it.
    let mut ta_rsync_uri: Option<String> = None;
    for uri in tal.ta_uris.iter() {
        if uri.scheme() == "rsync" {
            ta_rsync_uri = Some(uri.as_str().to_string());
            break;
        }
    }
    let ta_rsync_uri = ta_rsync_uri.expect("tal has rsync uri");
    let validation_time = time::OffsetDateTime::parse(
        "2026-04-07T00:00:00Z",
        &time::format_description::well_known::Rfc3339,
    )
    .unwrap();
    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time,
        objects: vec![CirObject {
            rsync_uri: ta_rsync_uri,
            sha256: Sha256::digest(&ta_bytes).to_vec(),
        }],
        tals: vec![CirTal {
            tal_uri: "https://example.test/root.tal".to_string(),
            tal_bytes,
        }],
    };
    (cir, ta_bytes)
}
/// Stores `bytes` into the static pool using the layout
/// `<root>/<date>/<h[0..2]>/<h[2..4]>/<full sha256 hex>`.
fn write_static(root: &Path, date: &str, bytes: &[u8]) {
    use sha2::{Digest, Sha256};
    let digest = hex::encode(Sha256::digest(bytes));
    let (shard1, shard2) = (&digest[0..2], &digest[2..4]);
    let dir = root.join(date).join(shard1).join(shard2);
    std::fs::create_dir_all(&dir).expect("mkdir static");
    std::fs::write(dir.join(&digest), bytes).expect("write static object");
}
/// Runs the reference `rpki` binary once against the materialized rsync mirror
/// and returns the path of the CCR it produced under `work`.
/// Panics (via assert) if the reference run does not exit successfully.
fn prepare_reference_ccr(work: &Path, cir: &CanonicalInputRepresentation, mirror_root: &Path) -> PathBuf {
    let reference_ccr = work.join("reference.ccr");
    let wrapper = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/cir-rsync-wrapper");
    let tal_path = apnic_tal_path();
    let ta_path = apnic_ta_path();
    let validation_time = cir
        .validation_time
        .format(&time::format_description::well_known::Rfc3339)
        .unwrap();
    // The wrapper script intercepts rsync; the env vars point it at the real
    // rsync binary and the local mirror it should serve from.
    let mut cmd = Command::new(env!("CARGO_BIN_EXE_rpki"));
    cmd.env("REAL_RSYNC_BIN", "/usr/bin/rsync")
        .env("CIR_MIRROR_ROOT", mirror_root)
        .env("CIR_LOCAL_LINK_MODE", "1");
    cmd.arg("--db").arg(work.join("reference-db").to_string_lossy().as_ref());
    cmd.arg("--tal-path").arg(tal_path.to_string_lossy().as_ref());
    cmd.arg("--ta-path").arg(ta_path.to_string_lossy().as_ref());
    cmd.arg("--disable-rrdp");
    cmd.arg("--rsync-command").arg(wrapper.to_string_lossy().as_ref());
    cmd.arg("--validation-time").arg(&validation_time);
    cmd.arg("--max-depth").arg("0");
    cmd.arg("--max-instances").arg("1");
    cmd.arg("--ccr-out").arg(reference_ccr.to_string_lossy().as_ref());
    let out = cmd.output().expect("run reference rpki");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    reference_ccr
}
#[test]
fn ours_sequence_replay_script_replays_all_steps() {
    // End-to-end check of scripts/cir/run_cir_replay_sequence_ours.sh: a
    // three-step sequence (full + two deltas, all with the same CIR and the
    // same reference CCR) must replay with every step matching.
    if !Path::new("/usr/bin/rsync").exists() {
        // rsync is required by the replay pipeline; skip when unavailable.
        return;
    }
    let td = tempfile::tempdir().expect("tempdir");
    let sequence_root = td.path().join("sequence");
    let static_root = sequence_root.join("static");
    let mirror_root = td.path().join("mirror");
    let step_ids = ["full", "delta-001", "delta-002"];
    let (cir, ta_bytes) = build_ta_only_cir();
    let cir_bytes = encode_cir(&cir).expect("encode cir");
    // Identical CIR input for every step.
    for step_id in step_ids {
        let step_dir = sequence_root.join(step_id);
        std::fs::create_dir_all(&step_dir).unwrap();
        std::fs::write(step_dir.join("input.cir"), &cir_bytes).unwrap();
    }
    write_static(&static_root, "20260407", &ta_bytes);
    materialize_cir(&cir, &static_root, &mirror_root, true).unwrap();
    // Produce one reference CCR and reuse it as each step's expected result.
    let reference = prepare_reference_ccr(td.path(), &cir, &mirror_root);
    for step_id in step_ids {
        let step_dir = sequence_root.join(step_id);
        std::fs::copy(&reference, step_dir.join("result.ccr")).unwrap();
        std::fs::write(step_dir.join("report.json"), b"{}").unwrap();
    }
    let sequence = serde_json::json!({
        "version": 1,
        "staticRoot": "static",
        "steps": [
            {"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null},
            {"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"},
            {"stepId":"delta-002","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-002/input.cir","ccrPath":"delta-002/result.ccr","reportPath":"delta-002/report.json","previousStepId":"delta-001"}
        ]
    });
    std::fs::write(
        sequence_root.join("sequence.json"),
        serde_json::to_vec_pretty(&sequence).unwrap(),
    )
    .unwrap();
    let script =
        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_replay_sequence_ours.sh");
    let out = Command::new(script)
        .arg("--sequence-root")
        .arg(sequence_root.to_string_lossy().as_ref())
        .arg("--rpki-bin")
        .arg(env!("CARGO_BIN_EXE_rpki"))
        .output()
        .expect("run sequence replay");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    let summary: serde_json::Value =
        serde_json::from_slice(&std::fs::read(sequence_root.join("sequence-summary.json")).unwrap())
            .unwrap();
    assert_eq!(summary["stepCount"], 3);
    assert_eq!(summary["allMatch"], true);
}