20260427 Split repo-bytes.db into a standalone database file; cir_materialize drops the old compatibility backends

yuyr 2026-04-27 16:01:33 +08:00
parent 944ea6ca00
commit eaa375c5ec
53 changed files with 950 additions and 595 deletions
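In short: the object bytes used by the CIR replay tooling now live in a single standalone repo-bytes.db (a RocksDB opened via ExternalRepoBytesDb), and cir_materialize only accepts --repo-bytes-db; the --static-root and --raw-store-db compatibility backends are removed. A minimal sketch of the resulting call path, using only functions visible in this diff (the wrapper function, its paths, and its error handling are illustrative assumptions, not part of the commit):

use std::path::PathBuf;

// Hypothetical helper mirroring what the cir_materialize binary now does.
fn run_materialize() -> Result<(), String> {
    let cir_path = PathBuf::from("full/input.cir");   // illustrative paths
    let repo_bytes_db = PathBuf::from("repo-bytes.db");
    let mirror_root = PathBuf::from(".tmp/mirror");
    // Read and decode the CIR input.
    let bytes = std::fs::read(&cir_path).map_err(|e| format!("read CIR failed: {e}"))?;
    let cir = rpki::cir::decode_cir(&bytes).map_err(|e| e.to_string())?;
    // repo-bytes.db is now the only byte backend; no --static-root / --raw-store-db fallback.
    rpki::cir::materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true)
        .map_err(|e| e.to_string())?;
    Ok(())
}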

View File

@@ -34,11 +34,7 @@ from pathlib import Path
 sequence_root = Path(sys.argv[1]).resolve()
 drop_bin = sys.argv[2]
 sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
-static_root = sequence_root / sequence["staticRoot"] if "staticRoot" in sequence else None
-raw_store_db = sequence_root / sequence["rawStoreDbPath"] if "rawStoreDbPath" in sequence else None
-backend_count = sum(x is not None for x in (static_root, raw_store_db))
-if backend_count != 1:
-raise SystemExit("sequence must set exactly one of staticRoot or rawStoreDbPath")
+repo_bytes_db = sequence_root / sequence["repoBytesDbPath"]
 summaries = []
 for step in sequence["steps"]:
@@ -58,10 +54,7 @@ for step in sequence["steps"]:
 "--md-out",
 str(out_dir / "drop.md"),
 ]
-if static_root is not None:
-cmd.extend(["--static-root", str(static_root)])
-else:
-cmd.extend(["--raw-store-db", str(raw_store_db)])
+cmd.extend(["--repo-bytes-db", str(repo_bytes_db)])
 proc = subprocess.run(cmd, capture_output=True, text=True)
 if proc.returncode != 0:
 raise SystemExit(

View File

@@ -92,7 +92,8 @@ if [[ -z "$DELTA_VALIDATION_TIME" ]]; then
 fi
 rm -rf "$OUT_DIR"
-mkdir -p "$OUT_DIR/full" "$OUT_DIR/delta-001" "$OUT_DIR/static"
+mkdir -p "$OUT_DIR/full" "$OUT_DIR/delta-001"
+REPO_BYTES_DB="$OUT_DIR/repo-bytes.db"
 FULL_DB="$OUT_DIR/full/db"
 DELTA_DB="$OUT_DIR/delta-001/db"
@@ -110,7 +111,7 @@ DELTA_DB="$OUT_DIR/delta-001/db"
 --report-json "$OUT_DIR/full/report.json" \
 --cir-enable \
 --cir-out "$OUT_DIR/full/input.cir" \
---cir-static-root "$OUT_DIR/static" \
+--repo-bytes-db "$REPO_BYTES_DB" \
 --cir-tal-uri "$CIR_TAL_URI" \
 >"$OUT_DIR/full/run.stdout.log" 2>"$OUT_DIR/full/run.stderr.log"
@@ -130,7 +131,7 @@ DELTA_DB="$OUT_DIR/delta-001/db"
 --report-json "$OUT_DIR/delta-001/report.json" \
 --cir-enable \
 --cir-out "$OUT_DIR/delta-001/input.cir" \
---cir-static-root "$OUT_DIR/static" \
+--repo-bytes-db "$REPO_BYTES_DB" \
 --cir-tal-uri "$CIR_TAL_URI" \
 >"$OUT_DIR/delta-001/run.stdout.log" 2>"$OUT_DIR/delta-001/run.stderr.log"
@@ -143,13 +144,12 @@ from pathlib import Path
 out = Path(sys.argv[1])
 base_validation_time = sys.argv[2]
 delta_validation_time = sys.argv[3]
-static_files = sum(1 for _ in (out / "static").rglob("*") if _.is_file())
 summary = {
 "version": 1,
 "kind": "cir_pair",
 "baseValidationTime": base_validation_time,
 "deltaValidationTime": delta_validation_time,
-"staticRoot": "static",
+"repoBytesDbPath": "repo-bytes.db",
 "steps": [
 {
 "kind": "full",
@@ -165,7 +165,7 @@ summary = {
 "previous": "full",
 },
 ],
-"staticFileCount": static_files,
+"repoBytesDbExists": (out / "repo-bytes.db").exists(),
 }
 (out / "summary.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
 PY

View File

@@ -116,13 +116,13 @@ for rir in rirs:
 "rir": rir,
 "root": str(root),
 "stepCount": len(seq["steps"]),
-"staticFileCount": summ["staticFileCount"],
+"repoBytesDbExists": summ.get("repoBytesDbExists", False),
 })
 summary = {"version": 1, "rirs": items}
 summary_json.write_text(json.dumps(summary, indent=2), encoding="utf-8")
 lines = ["# Multi-RIR Offline CIR Sequence Summary", ""]
 for item in items:
-lines.append(f"- `{item['rir']}`: `stepCount={item['stepCount']}` `staticFileCount={item['staticFileCount']}` `root={item['root']}`")
+lines.append(f"- `{item['rir']}`: `stepCount={item['stepCount']}` `repoBytesDbExists={item['repoBytesDbExists']}` `root={item['root']}`")
 summary_md.write_text("\n".join(lines) + "\n", encoding="utf-8")
 PY

View File

@@ -97,7 +97,8 @@ if [[ -z "$DELTA_VALIDATION_TIME" ]]; then
 fi
 rm -rf "$OUT_DIR"
-mkdir -p "$OUT_DIR/static" "$OUT_DIR/full"
+mkdir -p "$OUT_DIR/full"
+REPO_BYTES_DB="$OUT_DIR/repo-bytes.db"
 run_step() {
 local kind="$1"
@@ -114,7 +115,7 @@ run_step() {
 --report-json "$step_dir/report.json" \
 --cir-enable \
 --cir-out "$step_dir/input.cir" \
---cir-static-root "$OUT_DIR/static" \
+--repo-bytes-db "$REPO_BYTES_DB" \
 --cir-tal-uri "$CIR_TAL_URI"
 )
 if [[ "$FULL_REPO" -ne 1 ]]; then
@@ -186,7 +187,7 @@ for idx in range(1, delta_count + 1):
 summary = {
 "version": 1,
 "kind": "cir_sequence_offline",
-"staticRoot": "static",
+"repoBytesDbPath": "repo-bytes.db",
 "steps": steps,
 }
 (out / "sequence.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
@@ -195,7 +196,8 @@ summary = {
 {
 "version": 1,
 "stepCount": len(steps),
-"staticFileCount": sum(1 for p in (out / "static").rglob("*") if p.is_file()),
+"repoBytesDbPath": "repo-bytes.db",
+"repoBytesDbExists": (out / "repo-bytes.db").exists(),
 },
 indent=2,
 ),

View File

@@ -87,6 +87,7 @@ fi
 mkdir -p "$OUT"
 DB="$OUT/work-db"
 RAW_STORE_DB="$OUT/raw-store.db"
+REPO_BYTES_DB="$OUT/repo-bytes.db"
 ROWS="$OUT/.sequence_rows.tsv"
 : > "$ROWS"
@@ -140,6 +141,7 @@ PY
 target/release/rpki
 --db "$DB"
 --raw-store-db "$RAW_STORE_DB"
+--repo-bytes-db "$REPO_BYTES_DB"
 --tal-path "$TAL_REL"
 --ta-path "$TA_REL"
 --ccr-out "$ccr_out"
@@ -216,7 +218,7 @@ for idx, row in enumerate(rows):
 })
 (out / "sequence.json").write_text(
-json.dumps({"version": 1, "rawStoreDbPath": "raw-store.db", "steps": steps}, indent=2),
+json.dumps({"version": 1, "repoBytesDbPath": "repo-bytes.db", "steps": steps}, indent=2),
 encoding="utf-8",
 )

View File

@@ -56,16 +56,17 @@ IFS=',' read -r -a ITEMS <<< "$RIRS"
 for rir in "${ITEMS[@]}"; do
 read -r tal_rel ta_rel < <(case_paths "$rir")
 rir_root="$OUT_ROOT/$rir"
-mkdir -p "$rir_root/full" "$rir_root/static"
+mkdir -p "$rir_root/full"
+repo_bytes_db="$rir_root/repo-bytes.db"
 "$HELPER_BIN" \
 --tal-path "$ROOT_DIR/$tal_rel" \
 --ta-path "$ROOT_DIR/$ta_rel" \
 --tal-uri "https://example.test/$rir.tal" \
 --validation-time "2026-04-09T00:00:00Z" \
 --cir-out "$rir_root/full/input.cir" \
---static-root "$rir_root/static"
+--repo-bytes-db "$repo_bytes_db"
 "$EXTRACT_BIN" --cir "$rir_root/full/input.cir" --tals-dir "$rir_root/.tmp/tals" --meta-json "$rir_root/.tmp/meta.json"
-"$MATERIALIZE_BIN" --cir "$rir_root/full/input.cir" --static-root "$rir_root/static" --mirror-root "$rir_root/.tmp/mirror"
+"$MATERIALIZE_BIN" --cir "$rir_root/full/input.cir" --repo-bytes-db "$repo_bytes_db" --mirror-root "$rir_root/.tmp/mirror"
 FIRST_TAL="$(python3 - <<'PY' "$rir_root/.tmp/meta.json"
 import json,sys
 print(json.load(open(sys.argv[1]))["talFiles"][0]["path"])
@@ -99,7 +100,7 @@ for i in range(1, delta_count + 1):
 step = f"delta-{i:03d}"
 steps.append({"stepId":step,"kind":"delta","validationTime":"2026-04-09T00:00:00Z","cirPath":f"{step}/input.cir","ccrPath":f"{step}/result.ccr","reportPath":f"{step}/report.json","previousStepId":prev})
 prev = step
-(root/"sequence.json").write_text(json.dumps({"version":1,"staticRoot":"static","steps":steps}, indent=2), encoding="utf-8")
+(root/"sequence.json").write_text(json.dumps({"version":1,"repoBytesDbPath":"repo-bytes.db","steps":steps}, indent=2), encoding="utf-8")
 (root/"summary.json").write_text(json.dumps({"version":1,"stepCount":len(steps)}, indent=2), encoding="utf-8")
 PY
 done

View File

@@ -6,7 +6,7 @@ usage() {
 Usage:
 ./scripts/cir/run_cir_replay_matrix.sh \
 --cir <path> \
---static-root <path> \
+--repo-bytes-db <path> \
 --out-dir <path> \
 --reference-ccr <path> \
 --rpki-client-build-dir <path> \
@@ -21,7 +21,7 @@ EOF
 ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
 CIR=""
-STATIC_ROOT=""
+REPO_BYTES_DB=""
 OUT_DIR=""
 REFERENCE_CCR=""
 RPKI_CLIENT_BUILD_DIR=""
@@ -37,7 +37,7 @@ RPKI_CLIENT_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_rpki_client.sh"
 while [[ $# -gt 0 ]]; do
 case "$1" in
 --cir) CIR="$2"; shift 2 ;;
---static-root) STATIC_ROOT="$2"; shift 2 ;;
+--repo-bytes-db) REPO_BYTES_DB="$2"; shift 2 ;;
 --out-dir) OUT_DIR="$2"; shift 2 ;;
 --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
 --rpki-client-build-dir) RPKI_CLIENT_BUILD_DIR="$2"; shift 2 ;;
@@ -51,7 +51,7 @@ while [[ $# -gt 0 ]]; do
 esac
 done
-[[ -n "$CIR" && -n "$STATIC_ROOT" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" && -n "$RPKI_CLIENT_BUILD_DIR" ]] || {
+[[ -n "$CIR" && -n "$REPO_BYTES_DB" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" && -n "$RPKI_CLIENT_BUILD_DIR" ]] || {
 usage >&2
 exit 2
 }
@@ -102,7 +102,7 @@ mkdir -p "$OURS_OUT" "$ROUTINATOR_OUT" "$RPKI_CLIENT_OUT"
 ours_cmd=(
 "$OURS_SCRIPT"
 --cir "$CIR"
---static-root "$STATIC_ROOT"
+--repo-bytes-db "$REPO_BYTES_DB"
 --out-dir "$OURS_OUT"
 --reference-ccr "$REFERENCE_CCR"
 --rpki-bin "$RPKI_BIN"
@@ -111,7 +111,7 @@ ours_cmd=(
 routinator_cmd=(
 "$ROUTINATOR_SCRIPT"
 --cir "$CIR"
---static-root "$STATIC_ROOT"
+--repo-bytes-db "$REPO_BYTES_DB"
 --out-dir "$ROUTINATOR_OUT"
 --reference-ccr "$REFERENCE_CCR"
 --routinator-root "$ROUTINATOR_ROOT"
@@ -121,7 +121,7 @@ routinator_cmd=(
 rpki_client_cmd=(
 "$RPKI_CLIENT_SCRIPT"
 --cir "$CIR"
---static-root "$STATIC_ROOT"
+--repo-bytes-db "$REPO_BYTES_DB"
 --out-dir "$RPKI_CLIENT_OUT"
 --reference-ccr "$REFERENCE_CCR"
 --build-dir "$RPKI_CLIENT_BUILD_DIR"
@@ -159,7 +159,7 @@ DETAIL_MD="$OUT_DIR/detail.md"
 python3 - <<'PY' \
 "$CIR" \
-"$STATIC_ROOT" \
+"$REPO_BYTES_DB" \
 "$REFERENCE_CCR" \
 "$OURS_OUT" \
 "$ROUTINATOR_OUT" \
@@ -171,7 +171,7 @@ import json
 import sys
 from pathlib import Path
-cir_path, static_root, reference_ccr, ours_out, routinator_out, rpki_client_out, summary_json, summary_md, detail_md = sys.argv[1:]
+cir_path, repo_bytes_db, reference_ccr, ours_out, routinator_out, rpki_client_out, summary_json, summary_md, detail_md = sys.argv[1:]
 participants = []
 all_match = True
@@ -204,7 +204,7 @@ for name, out_dir in [
 summary = {
 "cirPath": cir_path,
-"staticRoot": static_root,
+"repoBytesDb": repo_bytes_db,
 "referenceCcr": reference_ccr,
 "participants": participants,
 "allMatch": all_match,
@@ -215,7 +215,7 @@ lines = [
 "# CIR Replay Matrix Summary",
 "",
 f"- `cir`: `{cir_path}`",
-f"- `static_root`: `{static_root}`",
+f"- `repo_bytes_db`: `{repo_bytes_db}`",
 f"- `reference_ccr`: `{reference_ccr}`",
 f"- `all_match`: `{all_match}`",
 "",

View File

@@ -6,7 +6,7 @@ usage() {
 Usage:
 ./scripts/cir/run_cir_replay_ours.sh \
 --cir <path> \
-[--static-root <path> | --raw-store-db <path>] \
+--repo-bytes-db <path> \
 --out-dir <path> \
 --reference-ccr <path> \
 [--keep-db] \
@@ -18,9 +18,7 @@ EOF
 ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
 CIR=""
-STATIC_ROOT=""
-POOL_DB=""
-RAW_STORE_DB=""
+REPO_BYTES_DB=""
 OUT_DIR=""
 REFERENCE_CCR=""
 KEEP_DB=0
@@ -34,8 +32,7 @@ WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"
 while [[ $# -gt 0 ]]; do
 case "$1" in
 --cir) CIR="$2"; shift 2 ;;
---static-root) STATIC_ROOT="$2"; shift 2 ;;
---raw-store-db) RAW_STORE_DB="$2"; shift 2 ;;
+--repo-bytes-db) REPO_BYTES_DB="$2"; shift 2 ;;
 --out-dir) OUT_DIR="$2"; shift 2 ;;
 --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
 --keep-db) KEEP_DB=1; shift ;;
@@ -46,14 +43,10 @@ while [[ $# -gt 0 ]]; do
 esac
 done
-[[ -n "$CIR" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
+[[ -n "$CIR" && -n "$REPO_BYTES_DB" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
 usage >&2
 exit 2
 }
-backend_count=0
-[[ -n "$STATIC_ROOT" ]] && backend_count=$((backend_count+1))
-[[ -n "$RAW_STORE_DB" ]] && backend_count=$((backend_count+1))
-[[ "$backend_count" -eq 1 ]] || { usage >&2; exit 2; }
 mkdir -p "$OUT_DIR"
@@ -82,12 +75,7 @@ rm -rf "$TMP_ROOT"
 mkdir -p "$TMP_ROOT"
 "$CIR_EXTRACT_INPUTS_BIN" --cir "$CIR" --tals-dir "$TALS_DIR" --meta-json "$META_JSON"
-materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --mirror-root "$MIRROR_ROOT")
-if [[ -n "$STATIC_ROOT" ]]; then
-materialize_cmd+=(--static-root "$STATIC_ROOT")
-else
-materialize_cmd+=(--raw-store-db "$RAW_STORE_DB")
-fi
+materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --repo-bytes-db "$REPO_BYTES_DB" --mirror-root "$MIRROR_ROOT")
 if [[ "$KEEP_DB" -eq 1 ]]; then
 materialize_cmd+=(--keep-db)
 fi

View File

@@ -6,7 +6,7 @@ usage() {
 Usage:
 ./scripts/cir/run_cir_replay_routinator.sh \
 --cir <path> \
-[--static-root <path> | --raw-store-db <path>] \
+--repo-bytes-db <path> \
 --out-dir <path> \
 --reference-ccr <path> \
 [--keep-db] \
@@ -20,9 +20,7 @@ ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
 RPKI_DEV_ROOT="${RPKI_DEV_ROOT:-$ROOT_DIR}"
 CIR=""
-STATIC_ROOT=""
-POOL_DB=""
-RAW_STORE_DB=""
+REPO_BYTES_DB=""
 OUT_DIR=""
 REFERENCE_CCR=""
 KEEP_DB=0
@@ -39,8 +37,7 @@ FAKETIME_LIB="${FAKETIME_LIB:-$ROOT_DIR/target/tools/faketime_pkg/extracted/libf
 while [[ $# -gt 0 ]]; do
 case "$1" in
 --cir) CIR="$2"; shift 2 ;;
---static-root) STATIC_ROOT="$2"; shift 2 ;;
---raw-store-db) RAW_STORE_DB="$2"; shift 2 ;;
+--repo-bytes-db) REPO_BYTES_DB="$2"; shift 2 ;;
 --out-dir) OUT_DIR="$2"; shift 2 ;;
 --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
 --keep-db) KEEP_DB=1; shift ;;
@@ -52,14 +49,10 @@ while [[ $# -gt 0 ]]; do
 esac
 done
-[[ -n "$CIR" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
+[[ -n "$CIR" && -n "$REPO_BYTES_DB" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
 usage >&2
 exit 2
 }
-backend_count=0
-[[ -n "$STATIC_ROOT" ]] && backend_count=$((backend_count+1))
-[[ -n "$RAW_STORE_DB" ]] && backend_count=$((backend_count+1))
-[[ "$backend_count" -eq 1 ]] || { usage >&2; exit 2; }
 mkdir -p "$OUT_DIR"
 if [[ ! -x "$CIR_MATERIALIZE_BIN" || ! -x "$CIR_EXTRACT_INPUTS_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
@@ -102,12 +95,7 @@ for tal in Path(sys.argv[1]).glob("*.tal"):
 seen_sep = True
 tal.write_text("\n".join(rsync_uris) + "\n\n" + "\n".join(base64_lines) + "\n", encoding="utf-8")
 PY
-materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --mirror-root "$MIRROR_ROOT")
-if [[ -n "$STATIC_ROOT" ]]; then
-materialize_cmd+=(--static-root "$STATIC_ROOT")
-else
-materialize_cmd+=(--raw-store-db "$RAW_STORE_DB")
-fi
+materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --repo-bytes-db "$REPO_BYTES_DB" --mirror-root "$MIRROR_ROOT")
 if [[ "$KEEP_DB" -eq 1 ]]; then
 materialize_cmd+=(--keep-db)
 fi

View File

@@ -6,10 +6,10 @@ usage() {
 Usage:
 ./scripts/cir/run_cir_replay_rpki_client.sh \
 --cir <path> \
-[--static-root <path> | --raw-store-db <path>] \
+--repo-bytes-db <path> \
 --out-dir <path> \
 --reference-ccr <path> \
---build-dir <path> \
+[--build-dir <path> | --rpki-client-bin <path>] \
 [--keep-db] \
 [--real-rsync-bin <path>]
 EOF
@@ -17,12 +17,11 @@ EOF
 ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
 CIR=""
-STATIC_ROOT=""
-POOL_DB=""
-RAW_STORE_DB=""
+REPO_BYTES_DB=""
 OUT_DIR=""
 REFERENCE_CCR=""
 BUILD_DIR=""
+RPKI_CLIENT_BIN=""
 KEEP_DB=0
 REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
 CIR_MATERIALIZE_BIN="${CIR_MATERIALIZE_BIN:-$ROOT_DIR/target/release/cir_materialize}"
@@ -33,11 +32,11 @@ WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"
 while [[ $# -gt 0 ]]; do
 case "$1" in
 --cir) CIR="$2"; shift 2 ;;
---static-root) STATIC_ROOT="$2"; shift 2 ;;
---raw-store-db) RAW_STORE_DB="$2"; shift 2 ;;
+--repo-bytes-db) REPO_BYTES_DB="$2"; shift 2 ;;
 --out-dir) OUT_DIR="$2"; shift 2 ;;
 --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
 --build-dir) BUILD_DIR="$2"; shift 2 ;;
+--rpki-client-bin) RPKI_CLIENT_BIN="$2"; shift 2 ;;
 --keep-db) KEEP_DB=1; shift ;;
 --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
 -h|--help) usage; exit 0 ;;
@@ -45,14 +44,17 @@ while [[ $# -gt 0 ]]; do
 esac
 done
-[[ -n "$CIR" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" && -n "$BUILD_DIR" ]] || {
+[[ -n "$CIR" && -n "$REPO_BYTES_DB" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
 usage >&2
 exit 2
 }
-backend_count=0
-[[ -n "$STATIC_ROOT" ]] && backend_count=$((backend_count+1))
-[[ -n "$RAW_STORE_DB" ]] && backend_count=$((backend_count+1))
-[[ "$backend_count" -eq 1 ]] || { usage >&2; exit 2; }
+if [[ -z "$BUILD_DIR" && -z "$RPKI_CLIENT_BIN" ]]; then
+usage >&2
+exit 2
+fi
+if [[ -z "$RPKI_CLIENT_BIN" ]]; then
+RPKI_CLIENT_BIN="$BUILD_DIR/src/rpki-client"
+fi
 mkdir -p "$OUT_DIR"
 if [[ ! -x "$CIR_MATERIALIZE_BIN" || ! -x "$CIR_EXTRACT_INPUTS_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
@@ -97,12 +99,7 @@ for tal in Path(sys.argv[1]).glob("*.tal"):
 seen_sep = True
 tal.write_text("\n".join(rsync_uris) + "\n\n" + "\n".join(base64_lines) + "\n", encoding="utf-8")
 PY
-materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --mirror-root "$MIRROR_ROOT")
-if [[ -n "$STATIC_ROOT" ]]; then
-materialize_cmd+=(--static-root "$STATIC_ROOT")
-else
-materialize_cmd+=(--raw-store-db "$RAW_STORE_DB")
-fi
+materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --repo-bytes-db "$REPO_BYTES_DB" --mirror-root "$MIRROR_ROOT")
 if [[ "$KEEP_DB" -eq 1 ]]; then
 materialize_cmd+=(--keep-db)
 fi
@@ -133,7 +130,8 @@ export REAL_RSYNC_BIN="$REAL_RSYNC_BIN"
 export CIR_LOCAL_LINK_MODE=1
 mkdir -p "$CACHE_DIR" "$OUT_CCR_DIR"
-"$BUILD_DIR/src/rpki-client" \
+chmod -R 0777 "$TMP_ROOT"
+"$RPKI_CLIENT_BIN" \
 -R \
 -e "$WRAPPER" \
 -P "$VALIDATION_EPOCH" \
@@ -141,11 +139,53 @@ mkdir -p "$CACHE_DIR" "$OUT_CCR_DIR"
 -d "$CACHE_DIR" \
 "$OUT_CCR_DIR" >"$RUN_LOG" 2>&1
+if [[ -f "$OUT_CCR_DIR/rpki.ccr" ]]; then
 "$CCR_TO_COMPARE_VIEWS_BIN" \
 --ccr "$OUT_CCR_DIR/rpki.ccr" \
 --vrps-out "$ACTUAL_VRPS" \
 --vaps-out "$ACTUAL_VAPS" \
 --trust-anchor "$TA_NAME"
+else
+python3 - <<'PY' "$OUT_CCR_DIR/json" "$ACTUAL_VRPS" "$ACTUAL_VAPS" "$TA_NAME"
+import csv
+import json
+import sys
+from pathlib import Path
+json_path = Path(sys.argv[1])
+vrps_out = Path(sys.argv[2])
+vaps_out = Path(sys.argv[3])
+default_ta = sys.argv[4]
+if not json_path.is_file():
+raise SystemExit(f"rpki-client output has neither rpki.ccr nor json: {json_path}")
+data = json.loads(json_path.read_text(encoding="utf-8"))
+vrps_out.parent.mkdir(parents=True, exist_ok=True)
+with vrps_out.open("w", newline="", encoding="utf-8") as fh:
+writer = csv.writer(fh)
+writer.writerow(["ASN", "IP Prefix", "Max Length", "Trust Anchor"])
+for roa in data.get("roas", []):
+writer.writerow([
+f"AS{roa['asn']}",
+roa["prefix"],
+str(roa["maxLength"]),
+roa.get("ta") or default_ta,
+])
+with vaps_out.open("w", newline="", encoding="utf-8") as fh:
+writer = csv.writer(fh)
+writer.writerow(["Customer ASN", "Providers", "Trust Anchor"])
+for aspa in data.get("aspas", []):
+providers = ";".join(f"AS{item}" for item in sorted(aspa.get("providers", [])))
+writer.writerow([
+f"AS{aspa['customer_asid']}",
+providers,
+aspa.get("ta") or default_ta,
+])
+PY
+fi
 python3 - <<'PY' "$ACTUAL_VRPS" "$ACTUAL_VAPS" "$ACTUAL_VRPS_META" "$ACTUAL_VAPS_META"
 import csv, json, sys

View File

@@ -55,11 +55,7 @@ rpki_bin = sys.argv[6]
 real_rsync_bin = sys.argv[7]
 sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
-static_root = sequence_root / sequence["staticRoot"] if "staticRoot" in sequence else None
-raw_store_db = sequence_root / sequence["rawStoreDbPath"] if "rawStoreDbPath" in sequence else None
-backend_count = sum(x is not None for x in (static_root, raw_store_db))
-if backend_count != 1:
-raise SystemExit("sequence must set exactly one of staticRoot or rawStoreDbPath")
+repo_bytes_db = sequence_root / sequence["repoBytesDbPath"]
 steps = sequence["steps"]
 results = []
@@ -81,10 +77,7 @@ for step in steps:
 "--real-rsync-bin",
 real_rsync_bin,
 ]
-if static_root is not None:
-cmd.extend(["--static-root", str(static_root)])
-else:
-cmd.extend(["--raw-store-db", str(raw_store_db)])
+cmd.extend(["--repo-bytes-db", str(repo_bytes_db)])
 proc = subprocess.run(cmd, capture_output=True, text=True)
 if proc.returncode != 0:
 raise SystemExit(

View File

@@ -57,11 +57,7 @@ routinator_bin = sys.argv[6]
 real_rsync_bin = sys.argv[7]
 sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
-static_root = sequence_root / sequence["staticRoot"] if "staticRoot" in sequence else None
-raw_store_db = sequence_root / sequence["rawStoreDbPath"] if "rawStoreDbPath" in sequence else None
-backend_count = sum(x is not None for x in (static_root, raw_store_db))
-if backend_count != 1:
-raise SystemExit("sequence must set exactly one of staticRoot or rawStoreDbPath")
+repo_bytes_db = sequence_root / sequence["repoBytesDbPath"]
 steps = sequence["steps"]
 results = []
 all_match = True
@@ -85,10 +81,7 @@ for step in steps:
 "--real-rsync-bin",
 real_rsync_bin,
 ]
-if static_root is not None:
-cmd.extend(["--static-root", str(static_root)])
-else:
-cmd.extend(["--raw-store-db", str(raw_store_db)])
+cmd.extend(["--repo-bytes-db", str(repo_bytes_db)])
 proc = subprocess.run(cmd, capture_output=True, text=True)
 if proc.returncode != 0:
 raise SystemExit(

View File

@@ -6,7 +6,7 @@ usage() {
 Usage:
 ./scripts/cir/run_cir_replay_sequence_rpki_client.sh \
 --sequence-root <path> \
---build-dir <path> \
+[--build-dir <path> | --rpki-client-bin <path>] \
 [--real-rsync-bin <path>]
 EOF
 }
@@ -14,6 +14,7 @@ EOF
 ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
 SEQUENCE_ROOT=""
 BUILD_DIR=""
+RPKI_CLIENT_BIN=""
 REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
 STEP_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_rpki_client.sh"
@@ -21,13 +22,14 @@ while [[ $# -gt 0 ]]; do
 case "$1" in
 --sequence-root) SEQUENCE_ROOT="$2"; shift 2 ;;
 --build-dir) BUILD_DIR="$2"; shift 2 ;;
+--rpki-client-bin) RPKI_CLIENT_BIN="$2"; shift 2 ;;
 --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
 -h|--help) usage; exit 0 ;;
 *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
 esac
 done
-[[ -n "$SEQUENCE_ROOT" && -n "$BUILD_DIR" ]] || { usage >&2; exit 2; }
+[[ -n "$SEQUENCE_ROOT" && ( -n "$BUILD_DIR" || -n "$RPKI_CLIENT_BIN" ) ]] || { usage >&2; exit 2; }
 SEQUENCE_ROOT="$(python3 - <<'PY' "$SEQUENCE_ROOT"
 from pathlib import Path
@@ -39,7 +41,7 @@ PY
 SUMMARY_JSON="$SEQUENCE_ROOT/sequence-summary-rpki-client.json"
 SUMMARY_MD="$SEQUENCE_ROOT/sequence-summary-rpki-client.md"
-python3 - <<'PY' "$SEQUENCE_ROOT" "$SUMMARY_JSON" "$SUMMARY_MD" "$STEP_SCRIPT" "$BUILD_DIR" "$REAL_RSYNC_BIN"
+python3 - <<'PY' "$SEQUENCE_ROOT" "$SUMMARY_JSON" "$SUMMARY_MD" "$STEP_SCRIPT" "$BUILD_DIR" "$RPKI_CLIENT_BIN" "$REAL_RSYNC_BIN"
 import json
 import subprocess
 import sys
@@ -50,14 +52,11 @@ summary_json = Path(sys.argv[2])
 summary_md = Path(sys.argv[3])
 step_script = Path(sys.argv[4])
 build_dir = sys.argv[5]
-real_rsync_bin = sys.argv[6]
+rpki_client_bin = sys.argv[6]
+real_rsync_bin = sys.argv[7]
 sequence = json.loads((sequence_root / "sequence.json").read_text(encoding="utf-8"))
-static_root = sequence_root / sequence["staticRoot"] if "staticRoot" in sequence else None
-raw_store_db = sequence_root / sequence["rawStoreDbPath"] if "rawStoreDbPath" in sequence else None
-backend_count = sum(x is not None for x in (static_root, raw_store_db))
-if backend_count != 1:
-raise SystemExit("sequence must set exactly one of staticRoot or rawStoreDbPath")
+repo_bytes_db = sequence_root / sequence["repoBytesDbPath"]
 steps = sequence["steps"]
 results = []
 all_match = True
@@ -74,15 +73,14 @@ for step in steps:
 str(out_dir),
 "--reference-ccr",
 str(sequence_root / step["ccrPath"]),
-"--build-dir",
-build_dir,
 "--real-rsync-bin",
 real_rsync_bin,
 ]
-if static_root is not None:
-cmd.extend(["--static-root", str(static_root)])
-else:
-cmd.extend(["--raw-store-db", str(raw_store_db)])
+if rpki_client_bin:
+cmd.extend(["--rpki-client-bin", rpki_client_bin])
+else:
+cmd.extend(["--build-dir", build_dir])
+cmd.extend(["--repo-bytes-db", str(repo_bytes_db)])
 proc = subprocess.run(cmd, capture_output=True, text=True)
 if proc.returncode != 0:
 raise SystemExit(

View File

@@ -193,10 +193,10 @@ for rir in "${RIRS[@]}"; do
 done
 if [[ "$KIND" == "snapshot" ]]; then
-rm -rf state/ours/work-db state/ours/raw-store.db state/rpki-client/cache state/rpki-client/out state/rpki-client/ta state/rpki-client/.ta
+rm -rf state/ours/work-db state/ours/raw-store.db state/ours/repo-bytes.db state/rpki-client/cache state/rpki-client/out state/rpki-client/ta state/rpki-client/.ta
 fi
-mkdir -p state/ours/work-db state/ours/raw-store.db state/rpki-client/cache state/rpki-client/out state/rpki-client/ta state/rpki-client/.ta
-chmod 0777 state/ours/work-db state/ours/raw-store.db
+mkdir -p state/ours/work-db state/ours/raw-store.db state/ours/repo-bytes.db state/rpki-client/cache state/rpki-client/out state/rpki-client/ta state/rpki-client/.ta
+chmod 0777 state/ours/work-db state/ours/raw-store.db state/ours/repo-bytes.db
 chmod -R 0777 state/rpki-client
 START_EPOCH="$(python3 - <<'PY'
@@ -222,6 +222,7 @@ PY
 env RPKI_PROGRESS_LOG=1 RPKI_PROGRESS_SLOW_SECS=0 ./rpki \
 --db state/ours/work-db \
 --raw-store-db state/ours/raw-store.db \
+--repo-bytes-db state/ours/repo-bytes.db \
 "${OURS_TAL_ARGS[@]}" \
 --parallel-phase1 \
 "${OURS_EXTRA_ARGV[@]}" \

View File

@@ -46,6 +46,7 @@ REMOTE_REPO="$REMOTE_ROOT/repo"
 REMOTE_OUT="$REMOTE_ROOT/rounds/$ROUND_ID/ours"
 REMOTE_WORK_DB="$REMOTE_ROOT/state/ours/work-db"
 REMOTE_RAW_STORE="$REMOTE_ROOT/state/ours/raw-store.db"
+REMOTE_REPO_BYTES="$REMOTE_ROOT/state/ours/repo-bytes.db"
 mkdir -p "$LOCAL_OUT"
@@ -66,6 +67,7 @@ ssh "$SSH_TARGET" \
 REMOTE_OUT="$REMOTE_OUT" \
 REMOTE_WORK_DB="$REMOTE_WORK_DB" \
 REMOTE_RAW_STORE="$REMOTE_RAW_STORE" \
+REMOTE_REPO_BYTES="$REMOTE_REPO_BYTES" \
 KIND="$KIND" \
 ROUND_ID="$ROUND_ID" \
 SCHEDULED_AT="$SCHEDULED_AT" \
@@ -76,7 +78,7 @@ cd "$REMOTE_REPO"
 mkdir -p "$REMOTE_OUT"
 if [[ "$KIND" == "snapshot" ]]; then
-rm -rf "$REMOTE_WORK_DB" "$REMOTE_RAW_STORE"
+rm -rf "$REMOTE_WORK_DB" "$REMOTE_RAW_STORE" "$REMOTE_REPO_BYTES"
 fi
 mkdir -p "$(dirname "$REMOTE_WORK_DB")"
@@ -97,6 +99,7 @@ set +e
 env RPKI_PROGRESS_LOG=1 RPKI_PROGRESS_SLOW_SECS=0 target/release/rpki \
 --db "$REMOTE_WORK_DB" \
 --raw-store-db "$REMOTE_RAW_STORE" \
+--repo-bytes-db "$REMOTE_REPO_BYTES" \
 --tal-path tests/fixtures/tal/apnic-rfc7730-https.tal \
 --ta-path tests/fixtures/ta/apnic-ta.cer \
 --parallel-phase1 \

View File

@@ -91,6 +91,7 @@ base_time = (
 (run_root / "rounds").mkdir(parents=True, exist_ok=True)
 (run_root / "state" / "ours" / "work-db").mkdir(parents=True, exist_ok=True)
 (run_root / "state" / "ours" / "raw-store.db").mkdir(parents=True, exist_ok=True)
+(run_root / "state" / "ours" / "repo-bytes.db").mkdir(parents=True, exist_ok=True)
 (run_root / "state" / "rpki-client" / "cache").mkdir(parents=True, exist_ok=True)
 (run_root / "state" / "rpki-client" / "out").mkdir(parents=True, exist_ok=True)
@@ -110,8 +111,10 @@ meta = {
 "ours": {
 "workDbPath": "state/ours/work-db",
 "rawStoreDbPath": "state/ours/raw-store.db",
+"repoBytesDbPath": "state/ours/repo-bytes.db",
 "remoteWorkDbPath": "state/ours/work-db",
 "remoteRawStoreDbPath": "state/ours/raw-store.db",
+"remoteRepoBytesDbPath": "state/ours/repo-bytes.db",
 },
 "rpkiClient": {
 "cachePath": "state/rpki-client/cache",

View File

@@ -46,6 +46,7 @@ REMOTE_REPO="$REMOTE_ROOT/repo"
 REMOTE_OUT="$REMOTE_ROOT/rounds/$ROUND_ID/ours"
 REMOTE_WORK_DB="$REMOTE_ROOT/state/ours/work-db"
 REMOTE_RAW_STORE="$REMOTE_ROOT/state/ours/raw-store.db"
+REMOTE_REPO_BYTES="$REMOTE_ROOT/state/ours/repo-bytes.db"
 mkdir -p "$LOCAL_OUT"
@@ -66,6 +67,7 @@ ssh "$SSH_TARGET" \
 REMOTE_OUT="$REMOTE_OUT" \
 REMOTE_WORK_DB="$REMOTE_WORK_DB" \
 REMOTE_RAW_STORE="$REMOTE_RAW_STORE" \
+REMOTE_REPO_BYTES="$REMOTE_REPO_BYTES" \
 KIND="$KIND" \
 ROUND_ID="$ROUND_ID" \
 SCHEDULED_AT="$SCHEDULED_AT" \
@@ -76,7 +78,7 @@ cd "$REMOTE_REPO"
 mkdir -p "$REMOTE_OUT"
 if [[ "$KIND" == "snapshot" ]]; then
-rm -rf "$REMOTE_WORK_DB" "$REMOTE_RAW_STORE"
+rm -rf "$REMOTE_WORK_DB" "$REMOTE_RAW_STORE" "$REMOTE_REPO_BYTES"
 fi
 mkdir -p "$(dirname "$REMOTE_WORK_DB")"
@@ -97,6 +99,7 @@ set +e
 env RPKI_PROGRESS_LOG=1 target/release/rpki \
 --db "$REMOTE_WORK_DB" \
 --raw-store-db "$REMOTE_RAW_STORE" \
+--repo-bytes-db "$REMOTE_REPO_BYTES" \
 --tal-path tests/fixtures/tal/arin.tal \
 --ta-path tests/fixtures/ta/arin-ta.cer \
 --ccr-out "$ccr_out" \

View File

@@ -583,12 +583,14 @@ mod tests {
 );
 assert!(trace.source_object_raw.raw_present);
 assert!(trace.source_ee_cert_raw.raw_present);
-assert!(trace.chain_leaf_to_root[0]
+assert!(
+trace.chain_leaf_to_root[0]
 .related_artifacts
 .iter()
 .any(|artifact| {
 artifact.uri.as_deref() == Some(leaf_manifest) && artifact.raw.raw_present
-}));
+})
+);
 }
 #[test]

View File

@@ -1,13 +1,13 @@
 use std::collections::{BTreeMap, BTreeSet};
 use std::path::PathBuf;
-use rpki::blob_store::{ExternalRawStoreDb, RawObjectStore};
+use rpki::blob_store::ExternalRepoBytesDb;
 use rpki::bundle::decode_ccr_compare_views;
 use rpki::ccr::decode_content_info;
-use rpki::cir::{decode_cir, resolve_static_pool_file};
+use rpki::cir::decode_cir;
 use rpki::data_model::roa::RoaObject;
-const USAGE: &str = "Usage: cir_drop_report --cir <path> --ccr <path> --report-json <path> (--static-root <path> | --raw-store-db <path>) --json-out <path> --md-out <path>";
+const USAGE: &str = "Usage: cir_drop_report --cir <path> --ccr <path> --report-json <path> --repo-bytes-db <path> --json-out <path> --md-out <path>";
 #[derive(serde::Serialize)]
 struct DroppedObjectRecord {
@@ -50,23 +50,11 @@ fn classify_reason(detail: Option<&str>, result: &str) -> String {
 fn parse_args(
 argv: &[String],
-) -> Result<
-(
-PathBuf,
-PathBuf,
-PathBuf,
-Option<PathBuf>,
-Option<PathBuf>,
-PathBuf,
-PathBuf,
-),
-String,
-> {
+) -> Result<(PathBuf, PathBuf, PathBuf, PathBuf, PathBuf, PathBuf), String> {
 let mut cir = None;
 let mut ccr = None;
 let mut report = None;
-let mut static_root = None;
-let mut raw_store_db = None;
+let mut repo_bytes_db = None;
 let mut json_out = None;
 let mut md_out = None;
 let mut i = 1usize;
@@ -86,16 +74,10 @@ fn parse_args(
 argv.get(i).ok_or("--report-json requires a value")?,
 ));
 }
-"--static-root" => {
+"--repo-bytes-db" => {
 i += 1;
-static_root = Some(PathBuf::from(
-argv.get(i).ok_or("--static-root requires a value")?,
-));
-}
-"--raw-store-db" => {
-i += 1;
-raw_store_db = Some(PathBuf::from(
-argv.get(i).ok_or("--raw-store-db requires a value")?,
+repo_bytes_db = Some(PathBuf::from(
+argv.get(i).ok_or("--repo-bytes-db requires a value")?,
 ));
 }
 "--json-out" => {
@@ -119,8 +101,7 @@ fn parse_args(
 cir.ok_or_else(|| format!("--cir is required\n\n{USAGE}"))?,
 ccr.ok_or_else(|| format!("--ccr is required\n\n{USAGE}"))?,
 report.ok_or_else(|| format!("--report-json is required\n\n{USAGE}"))?,
-static_root,
-raw_store_db,
+repo_bytes_db.ok_or_else(|| format!("--repo-bytes-db is required\n\n{USAGE}"))?,
 json_out.ok_or_else(|| format!("--json-out is required\n\n{USAGE}"))?,
 md_out.ok_or_else(|| format!("--md-out is required\n\n{USAGE}"))?,
 ))
@@ -128,14 +109,7 @@ fn parse_args(
 fn main() -> Result<(), String> {
 let argv: Vec<String> = std::env::args().collect();
-let (cir_path, ccr_path, report_path, static_root, raw_store_db, json_out, md_out) =
-parse_args(&argv)?;
-let backend_count = static_root.is_some() as u8 + raw_store_db.is_some() as u8;
-if backend_count != 1 {
-return Err(format!(
-"must specify exactly one of --static-root or --raw-store-db\n\n{USAGE}"
-));
-}
+let (cir_path, ccr_path, report_path, repo_bytes_db, json_out, md_out) = parse_args(&argv)?;
 let cir = decode_cir(&std::fs::read(&cir_path).map_err(|e| format!("read cir failed: {e}"))?)
 .map_err(|e| format!("decode cir failed: {e}"))?;
@@ -149,6 +123,8 @@ fn main() -> Result<(), String> {
 &std::fs::read(&report_path).map_err(|e| format!("read report failed: {e}"))?,
 )
 .map_err(|e| format!("parse report failed: {e}"))?;
+let repo_bytes = ExternalRepoBytesDb::open(&repo_bytes_db)
+.map_err(|e| format!("open repo bytes db failed: {e}"))?;
 let mut object_hash_by_uri = BTreeMap::new();
 for object in &cir.objects {
@@ -189,18 +165,7 @@ fn main() -> Result<(), String> {
 let mut derived_vrp_count = 0usize;
 if kind == "roa" && !hash.is_empty() {
-let bytes_opt = if let Some(static_root) = static_root.as_ref() {
-match resolve_static_pool_file(static_root, &hash) {
-Ok(path) => std::fs::read(&path).ok(),
-Err(_) => None,
-}
-} else if let Some(raw_store_db) = raw_store_db.as_ref() {
-ExternalRawStoreDb::open(raw_store_db)
-.ok()
-.and_then(|store| store.get_blob_bytes(&hash).ok().flatten())
-} else {
-None
-};
+let bytes_opt = repo_bytes.get_blob_bytes(&hash).ok().flatten();
 match bytes_opt {
 Some(bytes) => {
 if let Ok(roa) = RoaObject::decode_der(&bytes) {

View File

@@ -1,7 +1,7 @@
 use std::path::PathBuf;
 fn usage() -> &'static str {
-"Usage: cir_materialize --cir <path> (--repo-bytes-db <path> | --raw-store-db <path> | --static-root <path>) --mirror-root <path> [--keep-db]"
+"Usage: cir_materialize --cir <path> --repo-bytes-db <path> --mirror-root <path> [--keep-db]"
 }
 fn main() {
@@ -13,9 +13,7 @@ fn main() {
 fn run(argv: Vec<String>) -> Result<(), String> {
 let mut cir_path: Option<PathBuf> = None;
-let mut static_root: Option<PathBuf> = None;
 let mut repo_bytes_db: Option<PathBuf> = None;
-let mut raw_store_db: Option<PathBuf> = None;
 let mut mirror_root: Option<PathBuf> = None;
 let mut keep_db = false;
@@ -27,24 +25,12 @@ fn run(argv: Vec<String>) -> Result<(), String> {
 i += 1;
 cir_path = Some(PathBuf::from(argv.get(i).ok_or("--cir requires a value")?));
 }
-"--static-root" => {
-i += 1;
-static_root = Some(PathBuf::from(
-argv.get(i).ok_or("--static-root requires a value")?,
-));
-}
 "--repo-bytes-db" => {
 i += 1;
 repo_bytes_db = Some(PathBuf::from(
 argv.get(i).ok_or("--repo-bytes-db requires a value")?,
 ));
 }
-"--raw-store-db" => {
-i += 1;
-raw_store_db = Some(PathBuf::from(
-argv.get(i).ok_or("--raw-store-db requires a value")?,
-));
-}
 "--mirror-root" => {
 i += 1;
 mirror_root = Some(PathBuf::from(
@@ -58,33 +44,18 @@ fn run(argv: Vec<String>) -> Result<(), String> {
 }
 let cir_path = cir_path.ok_or_else(|| format!("--cir is required\n\n{}", usage()))?;
+let repo_bytes_db =
+repo_bytes_db.ok_or_else(|| format!("--repo-bytes-db is required\n\n{}", usage()))?;
 let mirror_root =
 mirror_root.ok_or_else(|| format!("--mirror-root is required\n\n{}", usage()))?;
-let backend_count =
-static_root.is_some() as u8 + raw_store_db.is_some() as u8 + repo_bytes_db.is_some() as u8;
-if backend_count != 1 {
-return Err(format!(
-"must specify exactly one of --repo-bytes-db, --raw-store-db or --static-root\n\n{}",
-usage()
-));
-}
 let bytes = std::fs::read(&cir_path)
 .map_err(|e| format!("read CIR failed: {}: {e}", cir_path.display()))?;
 let cir = rpki::cir::decode_cir(&bytes).map_err(|e| e.to_string())?;
-let result = if let Some(static_root) = static_root {
-rpki::cir::materialize_cir(&cir, &static_root, &mirror_root, true)
-.map_err(|e| e.to_string())
-} else if let Some(repo_bytes_db) = repo_bytes_db {
+let result =
 rpki::cir::materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true)
-.map_err(|e| e.to_string())
-} else if let Some(raw_store_db) = raw_store_db {
-rpki::cir::materialize_cir_from_raw_store(&cir, &raw_store_db, &mirror_root, true)
-.map_err(|e| e.to_string())
-} else {
-unreachable!("validated backend count")
-};
+.map_err(|e| e.to_string());
 match result {
 Ok(summary) => {
 eprintln!(

View File

@@ -1,12 +1,12 @@
 use std::path::PathBuf;
+use rpki::blob_store::ExternalRepoBytesDb;
 use rpki::cir::{
 CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
-write_bytes_to_static_pool,
 };
 use sha2::Digest;
-const USAGE: &str = "Usage: cir_ta_only_fixture --tal-path <path> --ta-path <path> --tal-uri <url> --validation-time <rfc3339> --cir-out <path> --static-root <path>";
+const USAGE: &str = "Usage: cir_ta_only_fixture --tal-path <path> --ta-path <path> --tal-uri <url> --validation-time <rfc3339> --cir-out <path> --repo-bytes-db <path>";
 fn parse_args(
 argv: &[String],
@@ -26,7 +26,7 @@ fn parse_args(
 let mut tal_uri = None;
 let mut validation_time = None;
 let mut cir_out = None;
-let mut static_root = None;
+let mut repo_bytes_db = None;
 let mut i = 1usize;
 while i < argv.len() {
 match argv[i].as_str() {
@@ -63,10 +63,10 @@ fn parse_args(
 argv.get(i).ok_or("--cir-out requires a value")?,
 ));
 }
-"--static-root" => {
+"--repo-bytes-db" => {
 i += 1;
-static_root = Some(PathBuf::from(
-argv.get(i).ok_or("--static-root requires a value")?,
+repo_bytes_db = Some(PathBuf::from(
+argv.get(i).ok_or("--repo-bytes-db requires a value")?,
 ));
 }
 "-h" | "--help" => return Err(USAGE.to_string()),
@@ -80,13 +80,13 @@ fn parse_args(
 tal_uri.ok_or_else(|| format!("--tal-uri is required\n\n{USAGE}"))?,
 validation_time.ok_or_else(|| format!("--validation-time is required\n\n{USAGE}"))?,
 cir_out.ok_or_else(|| format!("--cir-out is required\n\n{USAGE}"))?,
-static_root.ok_or_else(|| format!("--static-root is required\n\n{USAGE}"))?,
+repo_bytes_db.ok_or_else(|| format!("--repo-bytes-db is required\n\n{USAGE}"))?,
 ))
 }
 fn main() -> Result<(), String> {
 let argv: Vec<String> = std::env::args().collect();
-let (tal_path, ta_path, tal_uri, validation_time, cir_out, static_root) = parse_args(&argv)?;
+let (tal_path, ta_path, tal_uri, validation_time, cir_out, repo_bytes_db) = parse_args(&argv)?;
 let tal_bytes = std::fs::read(&tal_path).map_err(|e| format!("read tal failed: {e}"))?;
 let ta_bytes = std::fs::read(&ta_path).map_err(|e| format!("read ta failed: {e}"))?;
@@ -102,8 +102,10 @@ fn main() -> Result<(), String> {
 let sha = sha2::Sha256::digest(&ta_bytes);
 let hash_hex = hex::encode(sha);
-write_bytes_to_static_pool(&static_root, validation_time.date(), &hash_hex, &ta_bytes)
-.map_err(|e| format!("write static pool failed: {e}"))?;
+ExternalRepoBytesDb::open(&repo_bytes_db)
+.map_err(|e| format!("open repo bytes db failed: {e}"))?
+.put_blob_bytes_batch(&[(hash_hex, ta_bytes.clone())])
+.map_err(|e| format!("write repo bytes db failed: {e}"))?;
 let cir = CanonicalInputRepresentation {
 version: CIR_VERSION_V1,

View File

@@ -737,25 +737,47 @@ mod tests {
 if skip_heavy_blackbox_test() {
 return;
 }
+let tal_path = PathBuf::from("tests/fixtures/tal/apnic-rfc7730-https.tal");
+let ta_path = PathBuf::from("tests/fixtures/ta/apnic-ta.cer");
+let replay_archive = PathBuf::from(
+"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/base-payload-archive",
+);
+let replay_locks = PathBuf::from(
+"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/base-locks.json",
+);
+let delta_archive = PathBuf::from(
+"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/payload-delta-archive",
+);
+let delta_locks = PathBuf::from(
+"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/locks-delta.json",
+);
+let required = [
+tal_path.as_path(),
+ta_path.as_path(),
+replay_archive.as_path(),
+replay_locks.as_path(),
+delta_archive.as_path(),
+delta_locks.as_path(),
+];
+if let Some(missing) = required.iter().find(|path| !path.exists()) {
+eprintln!(
+"skipping replay_bundle_record smoke test; fixture missing: {}",
+missing.display()
+);
+return;
+}
 let dir = tempdir().expect("tempdir");
 let out_dir = dir.path().join("bundle");
 let out = run(Args {
 rir: Some("apnic".to_string()),
 out_dir: Some(out_dir.clone()),
-tal_path: Some(PathBuf::from("tests/fixtures/tal/apnic-rfc7730-https.tal")),
-ta_path: Some(PathBuf::from("tests/fixtures/ta/apnic-ta.cer")),
-payload_replay_archive: Some(PathBuf::from(
-"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/base-payload-archive",
-)),
-payload_replay_locks: Some(PathBuf::from(
-"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/base-locks.json",
-)),
-payload_delta_archive: Some(PathBuf::from(
-"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/payload-delta-archive",
-)),
-payload_delta_locks: Some(PathBuf::from(
-"/home/yuyr/dev/rust_playground/routinator/bench/multi_rir_demo/runs/20260316-112341-multi-final3/apnic/locks-delta.json",
-)),
+tal_path: Some(tal_path),
+ta_path: Some(ta_path),
+payload_replay_archive: Some(replay_archive),
+payload_replay_locks: Some(replay_locks),
+payload_delta_archive: Some(delta_archive),
+payload_delta_locks: Some(delta_locks),
 validation_time: None,
 max_depth: Some(0),
 max_instances: Some(1),

View File

@ -7,6 +7,7 @@ use crate::storage::{RawByHashEntry, RocksStore, StorageError, StorageResult};
const RAW_BY_HASH_KEY_PREFIX: &str = "rawbyhash:"; const RAW_BY_HASH_KEY_PREFIX: &str = "rawbyhash:";
const RAW_BLOB_KEY_PREFIX: &str = "rawblob:"; const RAW_BLOB_KEY_PREFIX: &str = "rawblob:";
const REPO_BYTES_KEY_PREFIX: &str = "sha256:";
fn raw_by_hash_key(sha256_hex: &str) -> String { fn raw_by_hash_key(sha256_hex: &str) -> String {
format!("{RAW_BY_HASH_KEY_PREFIX}{sha256_hex}") format!("{RAW_BY_HASH_KEY_PREFIX}{sha256_hex}")
@ -16,6 +17,10 @@ fn raw_blob_key(sha256_hex: &str) -> String {
format!("{RAW_BLOB_KEY_PREFIX}{sha256_hex}") format!("{RAW_BLOB_KEY_PREFIX}{sha256_hex}")
} }
fn repo_bytes_key(sha256_hex: &str) -> String {
format!("{REPO_BYTES_KEY_PREFIX}{sha256_hex}")
}
fn validate_blob_sha256_hex(sha256_hex: &str) -> StorageResult<()> { fn validate_blob_sha256_hex(sha256_hex: &str) -> StorageResult<()> {
if sha256_hex.len() != 64 || !sha256_hex.as_bytes().iter().all(u8::is_ascii_hexdigit) { if sha256_hex.len() != 64 || !sha256_hex.as_bytes().iter().all(u8::is_ascii_hexdigit) {
return Err(StorageError::InvalidData { return Err(StorageError::InvalidData {
@ -65,6 +70,12 @@ pub struct ExternalRawStoreDb {
db: Arc<DB>, db: Arc<DB>,
} }
#[derive(Clone, Debug)]
pub struct ExternalRepoBytesDb {
path: PathBuf,
db: Arc<DB>,
}
impl ExternalRawStoreDb { impl ExternalRawStoreDb {
pub fn open(path: impl Into<PathBuf>) -> StorageResult<Self> { pub fn open(path: impl Into<PathBuf>) -> StorageResult<Self> {
let path = path.into(); let path = path.into();
@ -158,6 +169,73 @@ impl ExternalRawStoreDb {
} }
} }
impl ExternalRepoBytesDb {
pub fn open(path: impl Into<PathBuf>) -> StorageResult<Self> {
let path = path.into();
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent).map_err(|e| StorageError::RocksDb(e.to_string()))?;
}
let mut opts = Options::default();
opts.create_if_missing(true);
opts.set_compression_type(rocksdb::DBCompressionType::Lz4);
let db = DB::open(&opts, &path).map_err(|e| StorageError::RocksDb(e.to_string()))?;
Ok(Self {
path,
db: Arc::new(db),
})
}
pub fn put_blob_bytes_batch(&self, blobs: &[(String, Vec<u8>)]) -> StorageResult<()> {
if blobs.is_empty() {
return Ok(());
}
let mut batch = WriteBatch::default();
for (sha256_hex, bytes) in blobs {
validate_blob_sha256_hex(sha256_hex)?;
validate_blob_bytes(bytes)?;
let key = repo_bytes_key(sha256_hex);
batch.put(key.as_bytes(), bytes.as_slice());
}
self.db
.write(batch)
.map_err(|e| StorageError::RocksDb(e.to_string()))?;
Ok(())
}
pub fn get_blob_bytes(&self, sha256_hex: &str) -> StorageResult<Option<Vec<u8>>> {
validate_blob_sha256_hex(sha256_hex)?;
let key = repo_bytes_key(sha256_hex);
self.db
.get(key.as_bytes())
.map_err(|e| StorageError::RocksDb(e.to_string()))
}
pub fn get_blob_bytes_batch(
&self,
sha256_hexes: &[String],
) -> StorageResult<Vec<Option<Vec<u8>>>> {
if sha256_hexes.is_empty() {
return Ok(Vec::new());
}
let keys: Vec<String> = sha256_hexes
.iter()
.map(|hash| {
validate_blob_sha256_hex(hash)?;
Ok::<String, StorageError>(repo_bytes_key(hash))
})
.collect::<Result<_, _>>()?;
self.db
.multi_get(keys.iter().map(|key| key.as_bytes()))
.into_iter()
.map(|res| res.map_err(|e| StorageError::RocksDb(e.to_string())))
.collect()
}
pub fn path(&self) -> &PathBuf {
&self.path
}
}
impl RawObjectStore for RocksStore { impl RawObjectStore for RocksStore {
fn get_raw_entry(&self, sha256_hex: &str) -> StorageResult<Option<RawByHashEntry>> { fn get_raw_entry(&self, sha256_hex: &str) -> StorageResult<Option<RawByHashEntry>> {
self.get_raw_by_hash_entry(sha256_hex) self.get_raw_by_hash_entry(sha256_hex)
@ -254,7 +332,7 @@ impl RawObjectStore for ExternalRawStoreDb {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{ExternalRawStoreDb, RawObjectStore}; use super::{ExternalRawStoreDb, ExternalRepoBytesDb, RawObjectStore};
use crate::storage::{RawByHashEntry, RocksStore, StorageError, StorageResult}; use crate::storage::{RawByHashEntry, RocksStore, StorageError, StorageResult};
use std::collections::HashMap; use std::collections::HashMap;
@ -551,4 +629,47 @@ mod tests {
.put_raw_entries_batch(&[]) .put_raw_entries_batch(&[])
.expect("empty put succeeds"); .expect("empty put succeeds");
} }
#[test]
fn external_repo_bytes_db_roundtrips_blob_bytes_without_raw_entry() {
let td = tempfile::tempdir().expect("tempdir");
let repo_bytes =
ExternalRepoBytesDb::open(td.path().join("repo-bytes.db")).expect("open repo bytes");
let bytes = b"repo-bytes-object".to_vec();
let hash = sha256_hex(&bytes);
repo_bytes
.put_blob_bytes_batch(&[(hash.clone(), bytes.clone())])
.expect("put repo bytes");
assert_eq!(
repo_bytes.get_blob_bytes(&hash).expect("get repo bytes"),
Some(bytes.clone())
);
assert_eq!(
repo_bytes
.get_blob_bytes_batch(&[hash, "00".repeat(32)])
.expect("get repo bytes batch"),
vec![Some(bytes), None]
);
}
#[test]
fn external_repo_bytes_db_rejects_invalid_inputs() {
let td = tempfile::tempdir().expect("tempdir");
let repo_bytes =
ExternalRepoBytesDb::open(td.path().join("repo-bytes.db")).expect("open repo bytes");
assert!(
repo_bytes
.put_blob_bytes_batch(&[("not-a-valid-hash".to_string(), b"blob".to_vec())])
.is_err()
);
assert!(
repo_bytes
.put_blob_bytes_batch(&[(sha256_hex(b"blob"), Vec::new())])
.is_err()
);
assert!(repo_bytes.get_blob_bytes("not-a-valid-hash").is_err());
}
} }
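The ExternalRepoBytesDb added above is a plain RocksDB keyed by "sha256:<hex>" with LZ4 compression and no evidence metadata, unlike the raw-by-hash store it sits next to. A minimal standalone walkthrough of the new API, not part of the commit; it assumes the sha2, hex, and tempfile crates that the surrounding tests already use, and the payload is made up:

use sha2::{Digest, Sha256};

fn repo_bytes_db_walkthrough() {
    let dir = tempfile::tempdir().expect("tempdir");
    let store = ExternalRepoBytesDb::open(dir.path().join("repo-bytes.db")).expect("open repo bytes db");

    // Hypothetical object bytes, keyed by their SHA-256 hex as the store requires.
    let bytes = b"example.roa DER bytes".to_vec();
    let sha256_hex = hex::encode(Sha256::digest(&bytes));

    // Writes here go through the batched API; malformed hex keys and empty blobs are rejected.
    store
        .put_blob_bytes_batch(&[(sha256_hex.clone(), bytes.clone())])
        .expect("put blob bytes");

    // Reads return Option<Vec<u8>>; an unknown (but well-formed) hash is None, not an error.
    assert_eq!(store.get_blob_bytes(&sha256_hex).expect("get"), Some(bytes));
    assert_eq!(store.get_blob_bytes(&"0".repeat(64)).expect("get missing"), None);
}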

View File

@ -1,7 +1,7 @@
use crate::ccr::model::{ use crate::ccr::model::{
AspaPayloadSet, AspaPayloadState, CcrContentInfo, CcrDigestAlgorithm, ManifestInstance, AspaPayloadSet, AspaPayloadState, CCR_VERSION_V0, CcrContentInfo, CcrDigestAlgorithm,
ManifestState, RoaPayloadSet, RoaPayloadState, RouterKey, RouterKeySet, RouterKeyState, ManifestInstance, ManifestState, RoaPayloadSet, RoaPayloadState, RouterKey, RouterKeySet,
RpkiCanonicalCacheRepresentation, TrustAnchorState, CCR_VERSION_V0, RouterKeyState, RpkiCanonicalCacheRepresentation, TrustAnchorState,
}; };
use crate::data_model::common::{BigUnsigned, DerReader}; use crate::data_model::common::{BigUnsigned, DerReader};
use crate::data_model::oid::{OID_CT_RPKI_CCR, OID_CT_RPKI_CCR_RAW, OID_SHA256, OID_SHA256_RAW}; use crate::data_model::oid::{OID_CT_RPKI_CCR, OID_CT_RPKI_CCR_RAW, OID_SHA256, OID_SHA256_RAW};

View File

@ -1,4 +1,4 @@
use crate::ccr::decode::{decode_content_info, CcrDecodeError}; use crate::ccr::decode::{CcrDecodeError, decode_content_info};
use serde_json::json; use serde_json::json;
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]

View File

@ -1,7 +1,7 @@
use crate::ccr::model::{ use crate::ccr::model::{
AspaPayloadSet, AspaPayloadState, CcrContentInfo, CcrDigestAlgorithm, ManifestInstance, AspaPayloadSet, AspaPayloadState, CCR_VERSION_V0, CcrContentInfo, CcrDigestAlgorithm,
ManifestState, RoaPayloadSet, RoaPayloadState, RouterKey, RouterKeySet, RouterKeyState, ManifestInstance, ManifestState, RoaPayloadSet, RoaPayloadState, RouterKey, RouterKeySet,
RpkiCanonicalCacheRepresentation, TrustAnchorState, CCR_VERSION_V0, RouterKeyState, RpkiCanonicalCacheRepresentation, TrustAnchorState,
}; };
use crate::data_model::common::BigUnsigned; use crate::data_model::common::BigUnsigned;
use crate::data_model::oid::{OID_CT_RPKI_CCR_RAW, OID_SHA256_RAW}; use crate::data_model::oid::{OID_CT_RPKI_CCR_RAW, OID_SHA256_RAW};

View File

@ -1,9 +1,9 @@
use crate::ccr::build::{ use crate::ccr::build::{
build_aspa_payload_state, build_manifest_state_from_vcirs_with_breakdown, CcrBuildError, ManifestStateBuildBreakdown, build_aspa_payload_state,
build_roa_payload_state, build_router_key_state_from_runtime, build_trust_anchor_state, build_manifest_state_from_vcirs_with_breakdown, build_roa_payload_state,
CcrBuildError, ManifestStateBuildBreakdown, build_router_key_state_from_runtime, build_trust_anchor_state,
}; };
use crate::ccr::encode::{encode_content_info, CcrEncodeError}; use crate::ccr::encode::{CcrEncodeError, encode_content_info};
use crate::ccr::model::{CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation}; use crate::ccr::model::{CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation};
use crate::data_model::ta::TrustAnchor; use crate::data_model::ta::TrustAnchor;
use crate::storage::RocksStore; use crate::storage::RocksStore;

View File

@ -16,18 +16,17 @@ pub mod verify;
pub use accumulator::{CcrAccumulator, CcrManifestContribution}; pub use accumulator::{CcrAccumulator, CcrManifestContribution};
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub use build::{ pub use build::{
build_aspa_payload_state, build_manifest_state_from_vcirs, CcrBuildError, ManifestStateBuildBreakdown, build_aspa_payload_state,
build_manifest_state_from_vcirs_with_breakdown, build_roa_payload_state, build_manifest_state_from_vcirs, build_manifest_state_from_vcirs_with_breakdown,
build_router_key_state_from_runtime, build_trust_anchor_state, CcrBuildError, build_roa_payload_state, build_router_key_state_from_runtime, build_trust_anchor_state,
ManifestStateBuildBreakdown,
}; };
pub use decode::{decode_content_info, CcrDecodeError}; pub use decode::{CcrDecodeError, decode_content_info};
pub use dump::{dump_content_info_json, dump_content_info_json_value, CcrDumpError}; pub use dump::{CcrDumpError, dump_content_info_json, dump_content_info_json_value};
pub use encode::{encode_content_info, CcrEncodeError}; pub use encode::{CcrEncodeError, encode_content_info};
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub use export::{ pub use export::{
build_ccr_from_run, build_ccr_from_run_with_breakdown, write_ccr_file, CcrBuildBreakdown, CcrBuildBreakdown, CcrExportError, build_ccr_from_run, build_ccr_from_run_with_breakdown,
CcrExportError, write_ccr_file,
}; };
pub use hash::{compute_state_hash, verify_state_hash}; pub use hash::{compute_state_hash, verify_state_hash};
pub use model::{ pub use model::{
@ -37,7 +36,7 @@ pub use model::{
}; };
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub use verify::{ pub use verify::{
extract_vrp_rows, verify_against_report_json_path, verify_against_vcir_store, CcrVerifyError, CcrVerifySummary, extract_vrp_rows, verify_against_report_json_path,
verify_against_vcir_store_path, verify_content_info, verify_content_info_bytes, CcrVerifyError, verify_against_vcir_store, verify_against_vcir_store_path, verify_content_info,
CcrVerifySummary, verify_content_info_bytes,
}; };

View File

@ -1,4 +1,4 @@
use crate::data_model::common::{der_take_tlv, BigUnsigned}; use crate::data_model::common::{BigUnsigned, der_take_tlv};
use crate::data_model::oid::{OID_CT_RPKI_CCR, OID_SHA256}; use crate::data_model::oid::{OID_CT_RPKI_CCR, OID_SHA256};
pub const CCR_VERSION_V0: u32 = 0; pub const CCR_VERSION_V0: u32 = 0;

View File

@ -1,4 +1,4 @@
use crate::ccr::decode::{decode_content_info, CcrDecodeError}; use crate::ccr::decode::{CcrDecodeError, decode_content_info};
use crate::ccr::encode::{ use crate::ccr::encode::{
encode_aspa_payload_state_payload_der, encode_manifest_state_payload_der, encode_aspa_payload_state_payload_der, encode_manifest_state_payload_der,
encode_roa_payload_state_payload_der, encode_router_key_state_payload_der, encode_roa_payload_state_payload_der, encode_router_key_state_payload_der,

View File

@ -44,6 +44,9 @@ pub enum CirExportError {
#[error("write CIR file failed: {0}: {1}")] #[error("write CIR file failed: {0}: {1}")]
Write(String, String), Write(String, String),
#[error("write CIR trust anchor bytes to repo store failed: {0}")]
WriteRepoBytes(String),
} }
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
@ -349,6 +352,19 @@ pub fn export_cir_from_run_multi(
)?; )?;
let build_cir_ms = started.elapsed().as_millis() as u64; let build_cir_ms = started.elapsed().as_millis() as u64;
let ta_blobs = tal_bindings
.iter()
.map(|binding| {
(
ta_sha256_hex(&binding.trust_anchor.ta_certificate.raw_der),
binding.trust_anchor.ta_certificate.raw_der.clone(),
)
})
.collect::<Vec<_>>();
store
.put_blob_bytes_batch(&ta_blobs)
.map_err(|e| CirExportError::WriteRepoBytes(e.to_string()))?;
let started = std::time::Instant::now(); let started = std::time::Instant::now();
write_cir_file(cir_out, &cir)?; write_cir_file(cir_out, &cir)?;
let write_cir_ms = started.elapsed().as_millis() as u64; let write_cir_ms = started.elapsed().as_millis() as u64;
@ -548,6 +564,35 @@ mod tests {
assert!(cir_path.exists()); assert!(cir_path.exists());
} }
#[test]
fn export_cir_from_run_writes_ta_bytes_to_repo_bytes_store() {
let td = tempfile::tempdir().unwrap();
let store = RocksStore::open_with_external_repo_bytes(
&td.path().join("db"),
&td.path().join("repo-bytes.db"),
)
.unwrap();
let ta = sample_trust_anchor();
let ta_hash = sha256_hex(&ta.ta_certificate.raw_der);
let cir_path = td.path().join("out").join("example.cir");
export_cir_from_run(
&store,
&ta,
"https://example.test/root.tal",
sample_time(),
&[],
&cir_path,
sample_date(),
)
.expect("export cir");
assert_eq!(
store.get_blob_bytes(&ta_hash).unwrap(),
Some(ta.ta_certificate.raw_der.clone())
);
}
#[test] #[test]
fn build_cir_from_run_includes_vcir_current_instance_objects_from_audit() { fn build_cir_from_run_includes_vcir_current_instance_objects_from_audit() {
let td = tempfile::tempdir().unwrap(); let td = tempfile::tempdir().unwrap();

View File

@ -1,7 +1,7 @@
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::blob_store::{ExternalRawStoreDb, RawObjectStore}; use crate::blob_store::{ExternalRawStoreDb, ExternalRepoBytesDb, RawObjectStore};
use crate::cir::model::CanonicalInputRepresentation; use crate::cir::model::CanonicalInputRepresentation;
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
@ -52,6 +52,15 @@ pub enum CirMaterializeError {
#[error("read raw store failed for sha256={sha256_hex}: {detail}")] #[error("read raw store failed for sha256={sha256_hex}: {detail}")]
ReadRawStore { sha256_hex: String, detail: String }, ReadRawStore { sha256_hex: String, detail: String },
#[error("open repo bytes store failed: {path}: {detail}")]
OpenRepoBytesStore { path: String, detail: String },
#[error("repo bytes object not found for sha256={sha256_hex}")]
MissingRepoBytesObject { sha256_hex: String },
#[error("read repo bytes store failed for sha256={sha256_hex}: {detail}")]
ReadRepoBytesStore { sha256_hex: String, detail: String },
} }
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
@ -239,10 +248,11 @@ pub fn materialize_cir_from_repo_bytes(
detail: e.to_string(), detail: e.to_string(),
})?; })?;
let repo_bytes = let repo_bytes = ExternalRepoBytesDb::open(repo_bytes_db).map_err(|e| {
ExternalRawStoreDb::open(repo_bytes_db).map_err(|e| CirMaterializeError::OpenRawStore { CirMaterializeError::OpenRepoBytesStore {
path: repo_bytes_db.display().to_string(), path: repo_bytes_db.display().to_string(),
detail: e.to_string(), detail: e.to_string(),
}
})?; })?;
let mut copied_files = 0usize; let mut copied_files = 0usize;
@ -250,11 +260,11 @@ pub fn materialize_cir_from_repo_bytes(
let sha256_hex = hex::encode(&object.sha256); let sha256_hex = hex::encode(&object.sha256);
let bytes = repo_bytes let bytes = repo_bytes
.get_blob_bytes(&sha256_hex) .get_blob_bytes(&sha256_hex)
.map_err(|e| CirMaterializeError::ReadRawStore { .map_err(|e| CirMaterializeError::ReadRepoBytesStore {
sha256_hex: sha256_hex.clone(), sha256_hex: sha256_hex.clone(),
detail: e.to_string(), detail: e.to_string(),
})? })?
.ok_or_else(|| CirMaterializeError::MissingRawStoreObject { .ok_or_else(|| CirMaterializeError::MissingRepoBytesObject {
sha256_hex: sha256_hex.clone(), sha256_hex: sha256_hex.clone(),
})?; })?;
let relative = mirror_relative_path_for_rsync_uri(&object.rsync_uri)?; let relative = mirror_relative_path_for_rsync_uri(&object.rsync_uri)?;
@ -403,7 +413,7 @@ mod tests {
materialize_cir_from_repo_bytes, mirror_relative_path_for_rsync_uri, materialize_cir_from_repo_bytes, mirror_relative_path_for_rsync_uri,
resolve_static_pool_file, resolve_static_pool_file,
}; };
use crate::blob_store::ExternalRawStoreDb; use crate::blob_store::{ExternalRawStoreDb, ExternalRepoBytesDb};
use crate::cir::model::{ use crate::cir::model::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
}; };
@ -799,16 +809,13 @@ mod tests {
}; };
{ {
let repo_bytes = ExternalRawStoreDb::open(&repo_bytes_db).unwrap(); let repo_bytes = ExternalRepoBytesDb::open(&repo_bytes_db).unwrap();
let mut entry_a = repo_bytes
crate::storage::RawByHashEntry::from_bytes(hex::encode(&cir.objects[0].sha256), a); .put_blob_bytes_batch(&[
entry_a.origin_uris.push(cir.objects[0].rsync_uri.clone()); (hex::encode(&cir.objects[0].sha256), a),
repo_bytes.put_raw_entry(&entry_a).unwrap(); (hex::encode(&cir.objects[1].sha256), b),
])
let mut entry_b = .unwrap();
crate::storage::RawByHashEntry::from_bytes(hex::encode(&cir.objects[1].sha256), b);
entry_b.origin_uris.push(cir.objects[1].rsync_uri.clone());
repo_bytes.put_raw_entry(&entry_b).unwrap();
} }
let summary = let summary =

View File

@ -8,6 +8,7 @@ pub enum CirSequenceStepKind {
} }
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CirSequenceStep { pub struct CirSequenceStep {
pub step_id: String, pub step_id: String,
pub kind: CirSequenceStepKind, pub kind: CirSequenceStepKind,
@ -27,12 +28,10 @@ pub struct CirSequenceStep {
} }
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CirSequenceManifest { pub struct CirSequenceManifest {
pub version: u32, pub version: u32,
#[serde(default)] pub repo_bytes_db_path: String,
pub static_root: Option<String>,
#[serde(default)]
pub raw_store_db_path: Option<String>,
pub steps: Vec<CirSequenceStep>, pub steps: Vec<CirSequenceStep>,
} }
@ -41,19 +40,8 @@ impl CirSequenceManifest {
if self.version == 0 { if self.version == 0 {
return Err("sequence.version must be positive".to_string()); return Err("sequence.version must be positive".to_string());
} }
let backend_count = if self.repo_bytes_db_path.trim().is_empty() {
self.static_root.is_some() as u8 + self.raw_store_db_path.is_some() as u8; return Err("sequence.repo_bytes_db_path must not be empty".to_string());
if backend_count != 1 {
return Err(
"sequence must set exactly one of static_root or raw_store_db_path".to_string(),
);
}
match (self.static_root.as_ref(), self.raw_store_db_path.as_ref()) {
(Some(static_root), None) if !static_root.trim().is_empty() => {}
(None, Some(raw_store_db_path)) if !raw_store_db_path.trim().is_empty() => {}
_ => {
return Err("sequence backend path must not be empty".to_string());
}
} }
if self.steps.is_empty() { if self.steps.is_empty() {
return Err("sequence.steps must not be empty".to_string()); return Err("sequence.steps must not be empty".to_string());
@ -142,8 +130,7 @@ mod tests {
fn sample_manifest() -> CirSequenceManifest { fn sample_manifest() -> CirSequenceManifest {
CirSequenceManifest { CirSequenceManifest {
version: 1, version: 1,
static_root: Some("static".to_string()), repo_bytes_db_path: "repo-bytes.db".to_string(),
raw_store_db_path: None,
steps: vec![ steps: vec![
CirSequenceStep { CirSequenceStep {
step_id: "full".to_string(), step_id: "full".to_string(),
@ -205,10 +192,12 @@ mod tests {
} }
#[test] #[test]
fn sequence_manifest_validate_accepts_raw_store_backend() { fn sequence_manifest_validate_rejects_empty_repo_bytes_backend() {
let mut manifest = sample_manifest(); let mut manifest = sample_manifest();
manifest.static_root = None; manifest.repo_bytes_db_path = " ".to_string();
manifest.raw_store_db_path = Some("raw-store.db".to_string()); let err = manifest
manifest.validate().expect("raw store sequence"); .validate()
.expect_err("empty repo bytes backend must fail");
assert!(err.contains("repo_bytes_db_path"));
} }
} }
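The #[serde(rename_all = "camelCase")] additions are what let the sequence runner read repoBytesDbPath (and the other step fields) from sequence.json while the Rust structs keep snake_case names. A self-contained illustration of that mapping with a stand-in struct rather than the real CirSequenceManifest, assuming serde and serde_json as already used in this crate:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ManifestSketch {
    version: u32,
    repo_bytes_db_path: String,
}

fn camel_case_round_trip() {
    let json = r#"{"version":1,"repoBytesDbPath":"repo-bytes.db"}"#;
    let manifest: ManifestSketch = serde_json::from_str(json).expect("parse sketch");
    assert_eq!(manifest.repo_bytes_db_path, "repo-bytes.db");

    // Serializing goes back out with the camelCase key the driver script expects.
    let out = serde_json::to_string(&manifest).expect("serialize sketch");
    assert!(out.contains("\"repoBytesDbPath\""));
}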

View File

@ -1,14 +1,14 @@
use crate::ccr::{ use crate::ccr::{
build_ccr_from_run_with_breakdown, write_ccr_file, CcrAccumulator, CcrBuildBreakdown, CcrAccumulator, CcrBuildBreakdown, build_ccr_from_run_with_breakdown, write_ccr_file,
}; };
use crate::cir::{export_cir_from_run_multi, CirTalBinding}; use crate::cir::{CirTalBinding, export_cir_from_run_multi};
use std::io::BufWriter; use std::io::BufWriter;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::analysis::timing::{TimingHandle, TimingMeta, TimingMetaUpdate}; use crate::analysis::timing::{TimingHandle, TimingMeta, TimingMetaUpdate};
use crate::audit::{ use crate::audit::{
format_roa_ip_prefix, AspaOutput, AuditRepoSyncStats, AuditReportV2, AuditRunMeta, AspaOutput, AuditRepoSyncStats, AuditReportV2, AuditRunMeta, AuditWarning, TreeSummary,
AuditWarning, TreeSummary, VrpOutput, VrpOutput, format_roa_ip_prefix,
}; };
use crate::fetch::http::{BlockingHttpFetcher, HttpFetcherConfig}; use crate::fetch::http::{BlockingHttpFetcher, HttpFetcherConfig};
use crate::fetch::rsync::LocalDirRsyncFetcher; use crate::fetch::rsync::LocalDirRsyncFetcher;
@ -18,7 +18,7 @@ use crate::parallel::types::TalInputSpec;
use crate::policy::Policy; use crate::policy::Policy;
use crate::storage::RocksStore; use crate::storage::RocksStore;
use crate::validation::run_tree_from_tal::{ use crate::validation::run_tree_from_tal::{
run_tree_from_multiple_tals_parallel_phase1_audit, RunTreeFromTalAuditOutput, run_tree_from_multiple_tals_parallel_phase1_audit,
run_tree_from_multiple_tals_parallel_phase2_audit, run_tree_from_multiple_tals_parallel_phase2_audit,
run_tree_from_tal_and_ta_der_parallel_phase1_audit, run_tree_from_tal_and_ta_der_parallel_phase1_audit,
run_tree_from_tal_and_ta_der_parallel_phase2_audit, run_tree_from_tal_and_ta_der_parallel_phase2_audit,
@ -30,7 +30,6 @@ use crate::validation::run_tree_from_tal::{
run_tree_from_tal_and_ta_der_serial_audit_with_timing, run_tree_from_tal_and_ta_der_serial_audit_with_timing,
run_tree_from_tal_url_parallel_phase1_audit, run_tree_from_tal_url_parallel_phase2_audit, run_tree_from_tal_url_parallel_phase1_audit, run_tree_from_tal_url_parallel_phase2_audit,
run_tree_from_tal_url_serial_audit, run_tree_from_tal_url_serial_audit_with_timing, run_tree_from_tal_url_serial_audit, run_tree_from_tal_url_serial_audit_with_timing,
RunTreeFromTalAuditOutput,
}; };
use crate::validation::tree::TreeRunConfig; use crate::validation::tree::TreeRunConfig;
use serde::Serialize; use serde::Serialize;
@ -74,6 +73,7 @@ pub struct CliArgs {
pub db_path: PathBuf, pub db_path: PathBuf,
pub raw_store_db: Option<PathBuf>, pub raw_store_db: Option<PathBuf>,
pub repo_bytes_db: Option<PathBuf>,
pub policy_path: Option<PathBuf>, pub policy_path: Option<PathBuf>,
pub report_json_path: Option<PathBuf>, pub report_json_path: Option<PathBuf>,
pub report_json_compact: bool, pub report_json_compact: bool,
@ -118,6 +118,7 @@ Usage:
Options: Options:
--db <path> RocksDB directory path (required) --db <path> RocksDB directory path (required)
--raw-store-db <path> External raw-by-hash store DB path (optional) --raw-store-db <path> External raw-by-hash store DB path (optional)
--repo-bytes-db <path> External repo object bytes DB path (optional)
--policy <path> Policy TOML path (optional) --policy <path> Policy TOML path (optional)
--report-json <path> Write full audit report as JSON (optional) --report-json <path> Write full audit report as JSON (optional)
--report-json-compact Write report JSON without pretty-printing (requires --report-json) --report-json-compact Write report JSON without pretty-printing (requires --report-json)
@ -180,6 +181,7 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let mut db_path: Option<PathBuf> = None; let mut db_path: Option<PathBuf> = None;
let mut raw_store_db: Option<PathBuf> = None; let mut raw_store_db: Option<PathBuf> = None;
let mut repo_bytes_db: Option<PathBuf> = None;
let mut policy_path: Option<PathBuf> = None; let mut policy_path: Option<PathBuf> = None;
let mut report_json_path: Option<PathBuf> = None; let mut report_json_path: Option<PathBuf> = None;
let mut report_json_compact: bool = false; let mut report_json_compact: bool = false;
@ -296,6 +298,11 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let v = argv.get(i).ok_or("--raw-store-db requires a value")?; let v = argv.get(i).ok_or("--raw-store-db requires a value")?;
raw_store_db = Some(PathBuf::from(v)); raw_store_db = Some(PathBuf::from(v));
} }
"--repo-bytes-db" => {
i += 1;
let v = argv.get(i).ok_or("--repo-bytes-db requires a value")?;
repo_bytes_db = Some(PathBuf::from(v));
}
"--policy" => { "--policy" => {
i += 1; i += 1;
let v = argv.get(i).ok_or("--policy requires a value")?; let v = argv.get(i).ok_or("--policy requires a value")?;
@ -675,6 +682,7 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
tal_inputs, tal_inputs,
db_path, db_path,
raw_store_db, raw_store_db,
repo_bytes_db,
policy_path, policy_path,
report_json_path, report_json_path,
report_json_compact, report_json_compact,
@ -1066,9 +1074,13 @@ pub fn run(argv: &[String]) -> Result<(), String> {
.validation_time .validation_time
.unwrap_or_else(time::OffsetDateTime::now_utc); .unwrap_or_else(time::OffsetDateTime::now_utc);
let store = if let Some(raw_store_db) = args.raw_store_db.as_ref() { let store = if args.raw_store_db.is_some() || args.repo_bytes_db.is_some() {
Arc::new( Arc::new(
RocksStore::open_with_external_raw_store(&args.db_path, raw_store_db) RocksStore::open_with_external_stores(
&args.db_path,
args.raw_store_db.as_deref(),
args.repo_bytes_db.as_deref(),
)
.map_err(|e| e.to_string())?, .map_err(|e| e.to_string())?,
) )
} else { } else {
@ -2050,6 +2062,24 @@ mod tests {
); );
} }
#[test]
fn parse_accepts_external_repo_bytes_db() {
let argv = vec![
"rpki".to_string(),
"--db".to_string(),
"db".to_string(),
"--repo-bytes-db".to_string(),
"repo-bytes.db".to_string(),
"--tal-url".to_string(),
"https://example.test/x.tal".to_string(),
];
let args = parse_args(&argv).expect("parse args");
assert_eq!(
args.repo_bytes_db.as_deref(),
Some(std::path::Path::new("repo-bytes.db"))
);
}
#[test] #[test]
fn parse_accepts_cir_enable_with_raw_store_backend() { fn parse_accepts_cir_enable_with_raw_store_backend() {
let argv = vec![ let argv = vec![
@ -2764,9 +2794,11 @@ mod tests {
let tree = crate::validation::tree::TreeRunOutput { let tree = crate::validation::tree::TreeRunOutput {
instances_processed: 1, instances_processed: 1,
instances_failed: 0, instances_failed: 0,
warnings: vec![crate::report::Warning::new("synthetic warning") warnings: vec![
crate::report::Warning::new("synthetic warning")
.with_rfc_refs(&[crate::report::RfcRef("RFC 6487 §4.8.8.1")]) .with_rfc_refs(&[crate::report::RfcRef("RFC 6487 §4.8.8.1")])
.with_context("rsync://example.test/repo/pp/")], .with_context("rsync://example.test/repo/pp/"),
],
vrps: vec![crate::validation::objects::Vrp { vrps: vec![crate::validation::objects::Vrp {
asn: 64496, asn: 64496,
prefix: crate::data_model::roa::IpPrefix { prefix: crate::data_model::roa::IpPrefix {

View File

@ -1,5 +1,5 @@
use std::sync::mpsc::{self, Receiver, RecvTimeoutError, SyncSender, TrySendError};
use std::sync::Arc; use std::sync::Arc;
use std::sync::mpsc::{self, Receiver, RecvTimeoutError, SyncSender, TrySendError};
use std::thread::{self, JoinHandle}; use std::thread::{self, JoinHandle};
use std::time::Duration; use std::time::Duration;

View File

@ -229,18 +229,24 @@ mod tests {
#[test] #[test]
fn scheduler_completion_requires_all_queues_and_inflight_to_be_empty() { fn scheduler_completion_requires_all_queues_and_inflight_to_be_empty() {
let mut state = Phase2SchedulerState::new(); let mut state = Phase2SchedulerState::new();
assert!(state assert!(
state
.completion_snapshot(true, true, true, true) .completion_snapshot(true, true, true, true)
.is_complete()); .is_complete()
);
state.enqueue_roa_task(1u64); state.enqueue_roa_task(1u64);
assert!(!state assert!(
!state
.completion_snapshot(true, true, true, true) .completion_snapshot(true, true, true, true)
.is_complete()); .is_complete()
);
assert_eq!(state.pop_pending_roa_dispatch(), Some(1)); assert_eq!(state.pop_pending_roa_dispatch(), Some(1));
assert!(state assert!(
state
.completion_snapshot(true, true, true, true) .completion_snapshot(true, true, true, true)
.is_complete()); .is_complete()
);
} }
#[test] #[test]
@ -317,8 +323,10 @@ mod tests {
assert_eq!(results[1].worker_index, 1); assert_eq!(results[1].worker_index, 1);
assert_eq!(results[2].worker_index, 0); assert_eq!(results[2].worker_index, 0);
assert_eq!(state.inflight_len(), 0); assert_eq!(state.inflight_len(), 0);
assert!(state assert!(
state
.completion_snapshot(true, true, true, true) .completion_snapshot(true, true, true, true)
.is_complete()); .is_complete()
);
} }
} }

View File

@ -2,13 +2,13 @@ use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use rocksdb::{ use rocksdb::{
ColumnFamily, ColumnFamilyDescriptor, DBCompressionType, Direction, IteratorMode, Options, ColumnFamily, ColumnFamilyDescriptor, DB, DBCompressionType, Direction, IteratorMode, Options,
WriteBatch, DB, WriteBatch,
}; };
use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde::{Deserialize, Serialize, de::DeserializeOwned};
use sha2::Digest; use sha2::Digest;
use crate::blob_store::{ExternalRawStoreDb, RawObjectStore}; use crate::blob_store::{ExternalRawStoreDb, ExternalRepoBytesDb, RawObjectStore};
use crate::data_model::common::der_take_tlv; use crate::data_model::common::der_take_tlv;
use crate::data_model::rc::{AsResourceSet, IpResourceSet}; use crate::data_model::rc::{AsResourceSet, IpResourceSet};
@ -87,6 +87,7 @@ pub type StorageResult<T> = Result<T, StorageError>;
pub struct RocksStore { pub struct RocksStore {
db: DB, db: DB,
external_raw_store: Option<ExternalRawStoreDb>, external_raw_store: Option<ExternalRawStoreDb>,
external_repo_bytes: Option<ExternalRepoBytesDb>,
} }
pub mod pack { pub mod pack {
@ -854,12 +855,33 @@ impl RocksStore {
Ok(Self { Ok(Self {
db, db,
external_raw_store: None, external_raw_store: None,
external_repo_bytes: None,
}) })
} }
pub fn open_with_external_raw_store(path: &Path, raw_store_path: &Path) -> StorageResult<Self> { pub fn open_with_external_raw_store(path: &Path, raw_store_path: &Path) -> StorageResult<Self> {
Self::open_with_external_stores(path, Some(raw_store_path), None)
}
pub fn open_with_external_repo_bytes(
path: &Path,
repo_bytes_path: &Path,
) -> StorageResult<Self> {
Self::open_with_external_stores(path, None, Some(repo_bytes_path))
}
pub fn open_with_external_stores(
path: &Path,
raw_store_path: Option<&Path>,
repo_bytes_path: Option<&Path>,
) -> StorageResult<Self> {
let mut store = Self::open(path)?; let mut store = Self::open(path)?;
if let Some(raw_store_path) = raw_store_path {
store.external_raw_store = Some(ExternalRawStoreDb::open(raw_store_path)?); store.external_raw_store = Some(ExternalRawStoreDb::open(raw_store_path)?);
}
if let Some(repo_bytes_path) = repo_bytes_path {
store.external_repo_bytes = Some(ExternalRepoBytesDb::open(repo_bytes_path)?);
}
Ok(store) Ok(store)
} }
@ -867,6 +889,10 @@ impl RocksStore {
self.external_raw_store.as_ref() self.external_raw_store.as_ref()
} }
pub(crate) fn external_repo_bytes_ref(&self) -> Option<&ExternalRepoBytesDb> {
self.external_repo_bytes.as_ref()
}
fn cf(&self, name: &'static str) -> StorageResult<&ColumnFamily> { fn cf(&self, name: &'static str) -> StorageResult<&ColumnFamily> {
self.db self.db
.cf_handle(name) .cf_handle(name)
@ -1031,6 +1057,9 @@ impl RocksStore {
if blobs.is_empty() { if blobs.is_empty() {
return Ok(()); return Ok(());
} }
if let Some(repo_bytes) = self.external_repo_bytes.as_ref() {
return repo_bytes.put_blob_bytes_batch(blobs);
}
if let Some(raw_store) = self.external_raw_store.as_ref() { if let Some(raw_store) = self.external_raw_store.as_ref() {
return raw_store.put_blob_bytes_batch(blobs); return raw_store.put_blob_bytes_batch(blobs);
} }
@ -1115,6 +1144,9 @@ impl RocksStore {
} }
pub fn get_blob_bytes(&self, sha256_hex: &str) -> StorageResult<Option<Vec<u8>>> { pub fn get_blob_bytes(&self, sha256_hex: &str) -> StorageResult<Option<Vec<u8>>> {
if let Some(repo_bytes) = self.external_repo_bytes.as_ref() {
return repo_bytes.get_blob_bytes(sha256_hex);
}
if let Some(raw_store) = self.external_raw_store.as_ref() { if let Some(raw_store) = self.external_raw_store.as_ref() {
return raw_store.get_blob_bytes(sha256_hex); return raw_store.get_blob_bytes(sha256_hex);
} }
@ -1139,6 +1171,9 @@ impl RocksStore {
if sha256_hexes.is_empty() { if sha256_hexes.is_empty() {
return Ok(Vec::new()); return Ok(Vec::new());
} }
if let Some(repo_bytes) = self.external_repo_bytes.as_ref() {
return repo_bytes.get_blob_bytes_batch(sha256_hexes);
}
if let Some(raw_store) = self.external_raw_store.as_ref() { if let Some(raw_store) = self.external_raw_store.as_ref() {
return raw_store.get_blob_bytes_batch(sha256_hexes); return raw_store.get_blob_bytes_batch(sha256_hexes);
} }
@ -1717,6 +1752,11 @@ pub enum PackBytes {
store: std::sync::Arc<ExternalRawStoreDb>, store: std::sync::Arc<ExternalRawStoreDb>,
cache: std::sync::Arc<std::sync::OnceLock<std::sync::Arc<[u8]>>>, cache: std::sync::Arc<std::sync::OnceLock<std::sync::Arc<[u8]>>>,
}, },
LazyRepoBytes {
sha256_hex: String,
store: std::sync::Arc<ExternalRepoBytesDb>,
cache: std::sync::Arc<std::sync::OnceLock<std::sync::Arc<[u8]>>>,
},
} }
impl PackBytes { impl PackBytes {
@ -1732,6 +1772,14 @@ impl PackBytes {
} }
} }
pub fn lazy_repo_bytes(sha256_hex: String, store: std::sync::Arc<ExternalRepoBytesDb>) -> Self {
Self::LazyRepoBytes {
sha256_hex,
store,
cache: std::sync::Arc::new(std::sync::OnceLock::new()),
}
}
pub fn as_slice(&self) -> Result<&[u8], String> { pub fn as_slice(&self) -> Result<&[u8], String> {
match self { match self {
Self::Eager(bytes) => Ok(bytes.as_ref()), Self::Eager(bytes) => Ok(bytes.as_ref()),
@ -1752,6 +1800,23 @@ impl PackBytes {
.ok_or_else(|| format!("missing raw blob cache for sha256={sha256_hex}"))?; .ok_or_else(|| format!("missing raw blob cache for sha256={sha256_hex}"))?;
Ok(bytes.as_ref()) Ok(bytes.as_ref())
} }
Self::LazyRepoBytes {
sha256_hex,
store,
cache,
} => {
if cache.get().is_none() {
let bytes = store
.get_blob_bytes(sha256_hex)
.map_err(|e| e.to_string())?
.ok_or_else(|| format!("missing repo bytes for sha256={sha256_hex}"))?;
let _ = cache.set(std::sync::Arc::from(bytes));
}
let bytes = cache
.get()
.ok_or_else(|| format!("missing repo bytes cache for sha256={sha256_hex}"))?;
Ok(bytes.as_ref())
}
} }
} }
@ -1808,6 +1873,19 @@ impl PackFile {
) )
} }
pub fn from_lazy_repo_bytes(
rsync_uri: impl Into<String>,
sha256_hex: String,
sha256: [u8; 32],
store: std::sync::Arc<ExternalRepoBytesDb>,
) -> Self {
Self::new(
rsync_uri,
PackBytes::lazy_repo_bytes(sha256_hex, store),
sha256,
)
}
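LazyRepoBytes defers the RocksDB read until as_slice() is first called and then memoizes the blob in a OnceLock, so publication-point packs can reference repository objects without loading them eagerly. A hedged sketch of that lifecycle, not part of the commit; the payload and DB path are made up, and sha2/hex/tempfile are assumed as in the surrounding tests:

use sha2::{Digest, Sha256};
use std::sync::Arc;

fn lazy_repo_bytes_sketch() {
    let dir = tempfile::tempdir().expect("tempdir");
    let store =
        Arc::new(ExternalRepoBytesDb::open(dir.path().join("repo-bytes.db")).expect("open"));

    let der = b"hypothetical ROA bytes".to_vec();
    let sha256_hex = hex::encode(Sha256::digest(&der));
    store
        .put_blob_bytes_batch(&[(sha256_hex.clone(), der.clone())])
        .expect("seed repo bytes");

    // Only the hash and a store handle are held up front; the first as_slice() call
    // loads the blob from RocksDB into the OnceLock cache, later calls reuse it.
    let lazy = PackBytes::lazy_repo_bytes(sha256_hex, store);
    assert_eq!(lazy.as_slice().expect("first read"), der.as_slice());
    assert_eq!(lazy.as_slice().expect("cached read"), der.as_slice());
}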
pub fn from_bytes_compute_sha256(rsync_uri: impl Into<String>, bytes: Vec<u8>) -> Self { pub fn from_bytes_compute_sha256(rsync_uri: impl Into<String>, bytes: Vec<u8>) -> Self {
let sha256 = compute_sha256_32(&bytes); let sha256 = compute_sha256_32(&bytes);
Self::new(rsync_uri, PackBytes::eager(bytes), sha256) Self::new(rsync_uri, PackBytes::eager(bytes), sha256)
@ -2170,10 +2248,12 @@ mod tests {
store store
.delete_repository_view_entry(&entry1.rsync_uri) .delete_repository_view_entry(&entry1.rsync_uri)
.expect("delete repository view entry1"); .expect("delete repository view entry1");
assert!(store assert!(
store
.get_repository_view_entry(&entry1.rsync_uri) .get_repository_view_entry(&entry1.rsync_uri)
.expect("get deleted repository view entry1") .expect("get deleted repository view entry1")
.is_none()); .is_none()
);
let raw = sample_raw_by_hash_entry(b"raw-der-object".to_vec()); let raw = sample_raw_by_hash_entry(b"raw-der-object".to_vec());
store store
@ -2230,10 +2310,12 @@ mod tests {
store.get_blob_bytes(&hash).expect("get blob bytes"), store.get_blob_bytes(&hash).expect("get blob bytes"),
Some(bytes.clone()) Some(bytes.clone())
); );
assert!(store assert!(
store
.get_raw_by_hash_entry(&hash) .get_raw_by_hash_entry(&hash)
.expect("get raw entry") .expect("get raw entry")
.is_none()); .is_none()
);
} }
#[test] #[test]
@ -2255,6 +2337,54 @@ mod tests {
assert!(store.get_raw_by_hash_entry(&hash).unwrap().is_none()); assert!(store.get_raw_by_hash_entry(&hash).unwrap().is_none());
} }
#[test]
fn repo_bytes_db_is_physically_separate_from_external_raw_store() {
let td = tempfile::tempdir().expect("tempdir");
let main_db = td.path().join("main-db");
let raw_db = td.path().join("raw-store.db");
let repo_bytes_db = td.path().join("repo-bytes.db");
let store =
RocksStore::open_with_external_stores(&main_db, Some(&raw_db), Some(&repo_bytes_db))
.expect("open store");
let repo_bytes = b"repo-object".to_vec();
let repo_hash = sha256_hex(&repo_bytes);
let raw = sample_raw_by_hash_entry(b"raw-evidence".to_vec());
store
.put_blob_bytes_batch(&[(repo_hash.clone(), repo_bytes.clone())])
.expect("put repo bytes");
store.put_raw_by_hash_entry(&raw).expect("put raw evidence");
assert_eq!(store.get_blob_bytes(&repo_hash).unwrap(), Some(repo_bytes));
assert_eq!(
store.get_raw_by_hash_entry(&raw.sha256_hex).unwrap(),
Some(raw.clone())
);
drop(store);
let raw_only =
RocksStore::open_with_external_raw_store(&td.path().join("raw-reader"), &raw_db)
.expect("open raw only");
assert!(
raw_only.get_blob_bytes(&repo_hash).unwrap().is_none(),
"repo object bytes must not be written into raw-store.db"
);
let repo_only = RocksStore::open_with_external_repo_bytes(
&td.path().join("repo-reader"),
&repo_bytes_db,
)
.expect("open repo bytes only");
assert_eq!(
repo_only.get_blob_bytes(&repo_hash).unwrap(),
Some(b"repo-object".to_vec())
);
assert!(
repo_only.get_blob_bytes(&raw.sha256_hex).unwrap().is_none(),
"raw evidence bytes must not be written into repo-bytes.db"
);
}
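With both external stores attached, the blob-bytes APIs now prefer repo-bytes.db while raw-by-hash evidence stays in raw-store.db, which is what the test above pins down. A brief sketch of how the three opening variants relate, under the assumption that base is any scratch directory and each store gets its own path:

use std::path::{Path, PathBuf};

fn open_store_variants(base: &Path) -> Result<(), String> {
    // Main DB only: blob bytes fall back to the primary RocksDB instance.
    let _plain = RocksStore::open(&base.join("db-plain")).map_err(|e| e.to_string())?;

    // repo-bytes.db attached: get_blob_bytes / put_blob_bytes_batch route there first.
    let _repo = RocksStore::open_with_external_repo_bytes(
        &base.join("db-repo"),
        &base.join("repo-bytes-solo.db"),
    )
    .map_err(|e| e.to_string())?;

    // Both externals attached: repo bytes win for the blob APIs, while raw-by-hash
    // evidence entries keep going to raw-store.db.
    let raw_db: PathBuf = base.join("raw-store.db");
    let repo_db: PathBuf = base.join("repo-bytes.db");
    let _both =
        RocksStore::open_with_external_stores(&base.join("db-both"), Some(&raw_db), Some(&repo_db))
            .map_err(|e| e.to_string())?;
    Ok(())
}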
#[test] #[test]
fn put_blob_bytes_batch_accepts_empty_batch_with_external_raw_store() { fn put_blob_bytes_batch_accepts_empty_batch_with_external_raw_store() {
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
@ -2355,10 +2485,12 @@ mod tests {
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let store = RocksStore::open(td.path()).expect("open rocksdb"); let store = RocksStore::open(td.path()).expect("open rocksdb");
assert!(store assert!(
store
.get_blob_bytes_batch(&[]) .get_blob_bytes_batch(&[])
.expect("empty blob batch request") .expect("empty blob batch request")
.is_empty()); .is_empty()
);
} }
#[test] #[test]
@ -2429,10 +2561,12 @@ mod tests {
.delete_raw_by_hash_entry(&raw.sha256_hex) .delete_raw_by_hash_entry(&raw.sha256_hex)
.expect("delete external raw entry"); .expect("delete external raw entry");
assert!(store assert!(
store
.get_raw_by_hash_entry(&raw.sha256_hex) .get_raw_by_hash_entry(&raw.sha256_hex)
.unwrap() .unwrap()
.is_none()); .is_none()
);
assert!(store.get_blob_bytes(&raw.sha256_hex).unwrap().is_none()); assert!(store.get_blob_bytes(&raw.sha256_hex).unwrap().is_none());
} }
@ -2496,10 +2630,12 @@ mod tests {
store store
.delete_vcir(&vcir.manifest_rsync_uri) .delete_vcir(&vcir.manifest_rsync_uri)
.expect("delete vcir"); .expect("delete vcir");
assert!(store assert!(
store
.get_vcir(&vcir.manifest_rsync_uri) .get_vcir(&vcir.manifest_rsync_uri)
.expect("get deleted vcir") .expect("get deleted vcir")
.is_none()); .is_none()
);
} }
#[test] #[test]
@ -2554,10 +2690,12 @@ mod tests {
store store
.delete_audit_rule_index_entry(AuditRuleKind::Roa, &roa.rule_hash) .delete_audit_rule_index_entry(AuditRuleKind::Roa, &roa.rule_hash)
.expect("delete roa audit rule entry"); .expect("delete roa audit rule entry");
assert!(store assert!(
store
.get_audit_rule_index_entry(AuditRuleKind::Roa, &roa.rule_hash) .get_audit_rule_index_entry(AuditRuleKind::Roa, &roa.rule_hash)
.expect("get deleted roa audit rule entry") .expect("get deleted roa audit rule entry")
.is_none()); .is_none()
);
let mut invalid = sample_audit_rule_entry(AuditRuleKind::Roa); let mut invalid = sample_audit_rule_entry(AuditRuleKind::Roa);
invalid.rule_hash = "bad".to_string(); invalid.rule_hash = "bad".to_string();
@ -2591,10 +2729,15 @@ mod tests {
store store
.replace_vcir_and_audit_rule_indexes(None, &previous) .replace_vcir_and_audit_rule_indexes(None, &previous)
.expect("store previous vcir"); .expect("store previous vcir");
assert!(store assert!(
.get_audit_rule_index_entry(AuditRuleKind::Roa, &previous.local_outputs[0].rule_hash) store
.get_audit_rule_index_entry(
AuditRuleKind::Roa,
&previous.local_outputs[0].rule_hash
)
.expect("get old audit entry") .expect("get old audit entry")
.is_some()); .is_some()
);
let mut current = sample_vcir("rsync://example.test/repo/current.mft"); let mut current = sample_vcir("rsync://example.test/repo/current.mft");
current.local_outputs = vec![VcirLocalOutput { current.local_outputs = vec![VcirLocalOutput {
@ -2620,14 +2763,24 @@ mod tests {
.expect("get replaced vcir") .expect("get replaced vcir")
.expect("vcir exists"); .expect("vcir exists");
assert_eq!(got, current); assert_eq!(got, current);
assert!(store assert!(
.get_audit_rule_index_entry(AuditRuleKind::Roa, &previous.local_outputs[0].rule_hash) store
.get_audit_rule_index_entry(
AuditRuleKind::Roa,
&previous.local_outputs[0].rule_hash
)
.expect("get deleted old audit entry") .expect("get deleted old audit entry")
.is_none()); .is_none()
assert!(store );
.get_audit_rule_index_entry(AuditRuleKind::Aspa, &current.local_outputs[0].rule_hash) assert!(
store
.get_audit_rule_index_entry(
AuditRuleKind::Aspa,
&current.local_outputs[0].rule_hash
)
.expect("get new audit entry") .expect("get new audit entry")
.is_some()); .is_some()
);
} }
#[test] #[test]
@ -2769,10 +2922,12 @@ mod tests {
store store
.delete_rrdp_uri_owner_record(&member1.rsync_uri) .delete_rrdp_uri_owner_record(&member1.rsync_uri)
.expect("delete uri owner record"); .expect("delete uri owner record");
assert!(store assert!(
store
.get_rrdp_uri_owner_record(&member1.rsync_uri) .get_rrdp_uri_owner_record(&member1.rsync_uri)
.expect("get deleted uri owner") .expect("get deleted uri owner")
.is_none()); .is_none()
);
let mut invalid_source = let mut invalid_source =
sample_rrdp_source_record("https://invalid.example/notification.xml"); sample_rrdp_source_record("https://invalid.example/notification.xml");
@ -2903,15 +3058,21 @@ mod tests {
] ]
); );
assert!(store assert!(
store
.is_current_rrdp_source_member(notify_uri, &present_a.rsync_uri) .is_current_rrdp_source_member(notify_uri, &present_a.rsync_uri)
.expect("current a")); .expect("current a")
assert!(!store );
assert!(
!store
.is_current_rrdp_source_member(notify_uri, &withdrawn_b.rsync_uri) .is_current_rrdp_source_member(notify_uri, &withdrawn_b.rsync_uri)
.expect("withdrawn b")); .expect("withdrawn b")
assert!(!store );
assert!(
!store
.is_current_rrdp_source_member(notify_uri, &other_source.rsync_uri) .is_current_rrdp_source_member(notify_uri, &other_source.rsync_uri)
.expect("other source")); .expect("other source")
);
} }
#[test] #[test]
@ -3077,10 +3238,12 @@ mod tests {
assert_eq!(got.current_hash_hex, hash); assert_eq!(got.current_hash_hex, hash);
assert_eq!(got.current_hash, compute_sha256_32(&bytes)); assert_eq!(got.current_hash, compute_sha256_32(&bytes));
assert_eq!(got.bytes, bytes); assert_eq!(got.bytes, bytes);
assert!(store assert!(
store
.get_raw_by_hash_entry(&got.current_hash_hex) .get_raw_by_hash_entry(&got.current_hash_hex)
.expect("get raw entry") .expect("get raw entry")
.is_none()); .is_none()
);
} }
#[test] #[test]

View File

@ -4,7 +4,7 @@ use crate::data_model::signed_object::SignedObjectVerifyError;
use crate::policy::{CaFailedFetchPolicy, Policy}; use crate::policy::{CaFailedFetchPolicy, Policy};
use crate::report::{RfcRef, Warning}; use crate::report::{RfcRef, Warning};
use crate::storage::{PackFile, PackTime, RocksStore, StorageError, VcirArtifactRole}; use crate::storage::{PackFile, PackTime, RocksStore, StorageError, VcirArtifactRole};
use crate::validation::cert_path::{validate_signed_object_ee_cert_path_fast, CertPathError}; use crate::validation::cert_path::{CertPathError, validate_signed_object_ee_cert_path_fast};
use crate::validation::publication_point::PublicationPointSnapshot; use crate::validation::publication_point::PublicationPointSnapshot;
use sha2::Digest; use sha2::Digest;
use std::cmp::Ordering; use std::cmp::Ordering;
@ -398,10 +398,11 @@ pub fn process_manifest_publication_point_after_repo_sync(
Err(ManifestProcessError::StopAllOutput(fresh_err)) Err(ManifestProcessError::StopAllOutput(fresh_err))
} }
CaFailedFetchPolicy::ReuseCurrentInstanceVcir => { CaFailedFetchPolicy::ReuseCurrentInstanceVcir => {
let mut warnings = let mut warnings = vec![
vec![Warning::new(format!("manifest failed fetch: {fresh_err}")) Warning::new(format!("manifest failed fetch: {fresh_err}"))
.with_rfc_refs(&[RfcRef("RFC 9286 §6.6")]) .with_rfc_refs(&[RfcRef("RFC 9286 §6.6")])
.with_context(manifest_rsync_uri)]; .with_context(manifest_rsync_uri),
];
match load_current_instance_vcir_publication_point( match load_current_instance_vcir_publication_point(
store, store,
@ -740,6 +741,10 @@ pub(crate) fn try_build_fresh_publication_point_with_timing(
.external_raw_store_ref() .external_raw_store_ref()
.cloned() .cloned()
.map(std::sync::Arc::new); .map(std::sync::Arc::new);
let external_repo_bytes = store
.external_repo_bytes_ref()
.cloned()
.map(std::sync::Arc::new);
for entry in &entries { for entry in &entries {
let rsync_uri = let rsync_uri =
join_rsync_dir_and_file(publication_point_rsync_uri, entry.file_name.as_str()); join_rsync_dir_and_file(publication_point_rsync_uri, entry.file_name.as_str());
@ -770,7 +775,16 @@ pub(crate) fn try_build_fresh_publication_point_with_timing(
return Err(ManifestFreshError::HashMismatch { rsync_uri }); return Err(ManifestFreshError::HashMismatch { rsync_uri });
} }
if let (Some(_), Some(raw_store)) = if let (Some(_), Some(repo_bytes)) =
(current_index_guard.as_ref(), external_repo_bytes.as_ref())
{
files.push(PackFile::from_lazy_repo_bytes(
rsync_uri,
current_object.current_hash_hex,
current_object.current_hash,
repo_bytes.clone(),
));
} else if let (Some(_), Some(raw_store)) =
(current_index_guard.as_ref(), external_raw_store.as_ref()) (current_index_guard.as_ref(), external_raw_store.as_ref())
{ {
files.push(PackFile::from_lazy_external_raw_store( files.push(PackFile::from_lazy_external_raw_store(
@ -1327,10 +1341,12 @@ mod tests {
.apply_repository_view_entries(&entries) .apply_repository_view_entries(&entries)
.expect("apply current index"); .expect("apply current index");
assert!(store assert!(
store
.get_repository_view_entry(&manifest_rsync_uri) .get_repository_view_entry(&manifest_rsync_uri)
.expect("get repository view") .expect("get repository view")
.is_none()); .is_none()
);
let (fresh, _timing) = try_build_fresh_publication_point_with_timing( let (fresh, _timing) = try_build_fresh_publication_point_with_timing(
&store, &store,

View File

@ -1,5 +1,5 @@
use crate::analysis::timing::TimingHandle; use crate::analysis::timing::TimingHandle;
use crate::audit::{sha256_hex_from_32, AuditObjectKind, AuditObjectResult, ObjectAuditEntry}; use crate::audit::{AuditObjectKind, AuditObjectResult, ObjectAuditEntry, sha256_hex_from_32};
use crate::data_model::aspa::{AspaDecodeError, AspaObject, AspaValidateError}; use crate::data_model::aspa::{AspaDecodeError, AspaObject, AspaValidateError};
use crate::data_model::manifest::ManifestObject; use crate::data_model::manifest::ManifestObject;
use crate::data_model::rc::{ use crate::data_model::rc::{
@ -15,7 +15,7 @@ use crate::parallel::object_worker::{
use crate::policy::{Policy, SignedObjectFailurePolicy}; use crate::policy::{Policy, SignedObjectFailurePolicy};
use crate::report::{RfcRef, Warning}; use crate::report::{RfcRef, Warning};
use crate::storage::{PackFile, PackTime, VcirLocalOutput, VcirOutputType}; use crate::storage::{PackFile, PackTime, VcirLocalOutput, VcirOutputType};
use crate::validation::cert_path::{validate_signed_object_ee_cert_path_fast, CertPathError}; use crate::validation::cert_path::{CertPathError, validate_signed_object_ee_cert_path_fast};
use crate::validation::manifest::PublicationPointData; use crate::validation::manifest::PublicationPointData;
use crate::validation::publication_point::PublicationPointSnapshot; use crate::validation::publication_point::PublicationPointSnapshot;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};

View File

@ -23,13 +23,13 @@ use crate::replay::fetch_http::PayloadReplayHttpFetcher;
use crate::replay::fetch_rsync::PayloadReplayRsyncFetcher; use crate::replay::fetch_rsync::PayloadReplayRsyncFetcher;
use crate::sync::rrdp::Fetcher; use crate::sync::rrdp::Fetcher;
use crate::validation::from_tal::{ use crate::validation::from_tal::{
discover_root_ca_instance_from_tal_and_ta_der, discover_root_ca_instance_from_tal_url, DiscoveredRootCaInstance, FromTalError, discover_root_ca_instance_from_tal_and_ta_der,
discover_root_ca_instance_from_tal_with_fetchers, DiscoveredRootCaInstance, FromTalError, discover_root_ca_instance_from_tal_url, discover_root_ca_instance_from_tal_with_fetchers,
}; };
use crate::validation::objects::ParallelRoaWorkerPool; use crate::validation::objects::ParallelRoaWorkerPool;
use crate::validation::tree::{ use crate::validation::tree::{
run_tree_serial, run_tree_serial_audit, run_tree_serial_audit_multi_root, CaInstanceHandle, CaInstanceHandle, TreeRunAuditOutput, TreeRunConfig, TreeRunError, TreeRunOutput,
TreeRunAuditOutput, TreeRunConfig, TreeRunError, TreeRunOutput, run_tree_serial, run_tree_serial_audit, run_tree_serial_audit_multi_root,
}; };
use crate::validation::tree_parallel::{ use crate::validation::tree_parallel::{
run_tree_parallel_phase2_audit, run_tree_parallel_phase2_audit_multi_root, run_tree_parallel_phase2_audit, run_tree_parallel_phase2_audit_multi_root,

View File

@ -8,13 +8,13 @@ use crate::parallel::types::RepoIdentity;
use crate::policy::SignedObjectFailurePolicy; use crate::policy::SignedObjectFailurePolicy;
use crate::report::Warning; use crate::report::Warning;
use crate::validation::objects::{ use crate::validation::objects::{
prepare_publication_point_for_parallel_roa, reduce_parallel_roa_stage, ObjectsOutput, ObjectsOutput, OwnedRoaTask, ParallelObjectsPrepare, ParallelObjectsStage,
OwnedRoaTask, ParallelObjectsPrepare, ParallelObjectsStage, prepare_publication_point_for_parallel_roa, reduce_parallel_roa_stage,
}; };
use crate::validation::tree::{ use crate::validation::tree::{
run_tree_serial_audit_multi_root, CaInstanceHandle, DiscoveredChildCaInstance, CaInstanceHandle, DiscoveredChildCaInstance, PublicationPointRunResult, PublicationPointRunner,
PublicationPointRunResult, PublicationPointRunner, TreeRunAuditOutput, TreeRunConfig, TreeRunAuditOutput, TreeRunConfig, TreeRunError, TreeRunOutput,
TreeRunError, TreeRunOutput, run_tree_serial_audit_multi_root,
}; };
use crate::validation::tree_runner::{FreshPublicationPointStage, Rpkiv1PublicationPointRunner}; use crate::validation::tree_runner::{FreshPublicationPointStage, Rpkiv1PublicationPointRunner};

View File

@ -1,7 +1,7 @@
use crate::analysis::timing::TimingHandle; use crate::analysis::timing::TimingHandle;
use crate::audit::{ use crate::audit::{
sha256_hex, sha256_hex_from_32, AuditObjectKind, AuditObjectResult, AuditWarning, AuditObjectKind, AuditObjectResult, AuditWarning, ObjectAuditEntry, PublicationPointAudit,
ObjectAuditEntry, PublicationPointAudit, sha256_hex, sha256_hex_from_32,
}; };
use crate::audit_downloads::DownloadLogHandle; use crate::audit_downloads::DownloadLogHandle;
use crate::ccr::CcrAccumulator; use crate::ccr::CcrAccumulator;
@ -34,18 +34,18 @@ use crate::sync::repo::{
use crate::sync::rrdp::Fetcher; use crate::sync::rrdp::Fetcher;
use crate::validation::ca_instance::ca_instance_uris_from_ca_certificate; use crate::validation::ca_instance::ca_instance_uris_from_ca_certificate;
use crate::validation::ca_path::{ use crate::validation::ca_path::{
validate_subordinate_ca_cert_with_prevalidated_issuer_and_resources, CaPathError, CaPathError, IssuerEffectiveResourcesIndex, ValidatedSubordinateCaLite,
IssuerEffectiveResourcesIndex, ValidatedSubordinateCaLite, validate_subordinate_ca_cert_with_prevalidated_issuer_and_resources,
}; };
use crate::validation::manifest::{ use crate::validation::manifest::{
process_manifest_publication_point_fresh_after_repo_sync_with_timing,
FreshPublicationPointTimingBreakdown, FreshValidatedPublicationPoint, ManifestFreshError, FreshPublicationPointTimingBreakdown, FreshValidatedPublicationPoint, ManifestFreshError,
PublicationPointData, PublicationPointSource, PublicationPointData, PublicationPointSource,
process_manifest_publication_point_fresh_after_repo_sync_with_timing,
}; };
use crate::validation::objects::{ use crate::validation::objects::{
AspaAttestation, ParallelRoaWorkerPool, RouterKeyPayload, Vrp,
process_publication_point_for_issuer, process_publication_point_for_issuer_parallel_roa, process_publication_point_for_issuer, process_publication_point_for_issuer_parallel_roa,
process_publication_point_for_issuer_parallel_roa_with_pool, AspaAttestation, process_publication_point_for_issuer_parallel_roa_with_pool,
ParallelRoaWorkerPool, RouterKeyPayload, Vrp,
}; };
use crate::validation::publication_point::PublicationPointSnapshot; use crate::validation::publication_point::PublicationPointSnapshot;
use crate::validation::tree::{ use crate::validation::tree::{
@ -228,9 +228,11 @@ impl<'a> Rpkiv1PublicationPointRunner<'a> {
Vec::new(), Vec::new(),
Vec::new(), Vec::new(),
Vec::new(), Vec::new(),
vec![Warning::new(format!("child CA discovery failed: {e}")) vec![
Warning::new(format!("child CA discovery failed: {e}"))
.with_rfc_refs(&[RfcRef("RFC 6487 §7.2")]) .with_rfc_refs(&[RfcRef("RFC 6487 §7.2")])
.with_context(&ca.manifest_rsync_uri)], .with_context(&ca.manifest_rsync_uri),
],
), ),
}; };
let child_discovery_ms = child_discovery_started.elapsed().as_millis() as u64; let child_discovery_ms = child_discovery_started.elapsed().as_millis() as u64;
@ -1946,9 +1948,11 @@ fn project_current_instance_vcir_on_failed_fetch(
fresh_err: &ManifestFreshError, fresh_err: &ManifestFreshError,
validation_time: time::OffsetDateTime, validation_time: time::OffsetDateTime,
) -> Result<VcirReuseProjection, String> { ) -> Result<VcirReuseProjection, String> {
let mut warnings = vec![Warning::new(format!("manifest failed fetch: {fresh_err}")) let mut warnings = vec![
Warning::new(format!("manifest failed fetch: {fresh_err}"))
.with_rfc_refs(&[RfcRef("RFC 9286 §6.6")]) .with_rfc_refs(&[RfcRef("RFC 9286 §6.6")])
.with_context(&ca.manifest_rsync_uri)]; .with_context(&ca.manifest_rsync_uri),
];
let Some(vcir) = store let Some(vcir) = store
.get_vcir(&ca.manifest_rsync_uri) .get_vcir(&ca.manifest_rsync_uri)
@ -3236,8 +3240,8 @@ mod tests {
use crate::validation::tree::PublicationPointRunner; use crate::validation::tree::PublicationPointRunner;
use std::process::Command; use std::process::Command;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
struct NeverHttpFetcher; struct NeverHttpFetcher;
impl Fetcher for NeverHttpFetcher { impl Fetcher for NeverHttpFetcher {
@ -3666,8 +3670,8 @@ authorityKeyIdentifier = keyid:always
} }
} }
fn cernet_publication_point_snapshot_for_vcir_tests( fn cernet_publication_point_snapshot_for_vcir_tests()
) -> (PublicationPointSnapshot, Vec<u8>, time::OffsetDateTime) { -> (PublicationPointSnapshot, Vec<u8>, time::OffsetDateTime) {
let dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) let dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("tests/fixtures/repository/rpki.cernet.net/repo/cernet/0"); .join("tests/fixtures/repository/rpki.cernet.net/repo/cernet/0");
let rsync_base_uri = "rsync://rpki.cernet.net/repo/cernet/0/"; let rsync_base_uri = "rsync://rpki.cernet.net/repo/cernet/0/";
@ -4013,18 +4017,22 @@ authorityKeyIdentifier = keyid:always
.get_raw_by_hash_entry(&issuer_hash) .get_raw_by_hash_entry(&issuer_hash)
.expect("load issuer raw entry") .expect("load issuer raw entry")
.expect("issuer raw entry present"); .expect("issuer raw entry present");
assert!(issuer_entry assert!(
issuer_entry
.origin_uris .origin_uris
.iter() .iter()
.any(|uri| uri.ends_with("BfycW4hQb3wNP4YsiJW-1n6fjro.cer"))); .any(|uri| uri.ends_with("BfycW4hQb3wNP4YsiJW-1n6fjro.cer"))
);
let first_output = objects let first_output = objects
.local_outputs_cache .local_outputs_cache
.first() .first()
.expect("first local output"); .expect("first local output");
assert!(store assert!(
store
.get_raw_by_hash_entry(&first_output.source_ee_cert_hash) .get_raw_by_hash_entry(&first_output.source_ee_cert_hash)
.expect("load source ee raw") .expect("load source ee raw")
.is_none()); .is_none()
);
} }
#[test] #[test]
@ -4100,9 +4108,11 @@ authorityKeyIdentifier = keyid:always
.expect("rebuild vcir local outputs"); .expect("rebuild vcir local outputs");
assert!(!local_outputs.is_empty()); assert!(!local_outputs.is_empty());
assert_eq!(local_outputs.len(), objects.vrps.len()); assert_eq!(local_outputs.len(), objects.vrps.len());
assert!(local_outputs assert!(
local_outputs
.iter() .iter()
.all(|output| output.output_type == VcirOutputType::Vrp)); .all(|output| output.output_type == VcirOutputType::Vrp)
);
} }
#[test] #[test]
@ -4275,10 +4285,15 @@ authorityKeyIdentifier = keyid:always
pack.manifest_bytes.len() as u64 pack.manifest_bytes.len() as u64
); );
let first_output = vcir.local_outputs.first().expect("local outputs stored"); let first_output = vcir.local_outputs.first().expect("local outputs stored");
assert!(store assert!(
.get_audit_rule_index_entry(crate::storage::AuditRuleKind::Roa, &first_output.rule_hash) store
.get_audit_rule_index_entry(
crate::storage::AuditRuleKind::Roa,
&first_output.rule_hash
)
.expect("get audit rule index entry") .expect("get audit rule index entry")
.is_some()); .is_some()
);
} }
#[test] #[test]
@ -4454,12 +4469,16 @@ authorityKeyIdentifier = keyid:always
&objects, &objects,
&[], &[],
); );
assert!(artifacts assert!(
artifacts
.iter() .iter()
.any(|artifact| artifact.artifact_role == VcirArtifactRole::Manifest)); .any(|artifact| artifact.artifact_role == VcirArtifactRole::Manifest)
assert!(artifacts );
assert!(
artifacts
.iter() .iter()
.any(|artifact| artifact.artifact_role == VcirArtifactRole::TrustAnchorCert)); .any(|artifact| artifact.artifact_role == VcirArtifactRole::TrustAnchorCert)
);
assert!(artifacts.iter().any(|artifact| artifact.uri.as_deref() assert!(artifacts.iter().any(|artifact| artifact.uri.as_deref()
== Some("rsync://example.test/repo/issuer/issuer.crl") == Some("rsync://example.test/repo/issuer/issuer.crl")
&& artifact.artifact_role == VcirArtifactRole::CurrentCrl)); && artifact.artifact_role == VcirArtifactRole::CurrentCrl));
@ -5402,11 +5421,13 @@ authorityKeyIdentifier = keyid:always
assert!(out.children.is_empty()); assert!(out.children.is_empty());
assert_eq!(out.audits.len(), 1); assert_eq!(out.audits.len(), 1);
assert!(matches!(out.audits[0].result, AuditObjectResult::Ok)); assert!(matches!(out.audits[0].result, AuditObjectResult::Ok));
assert!(out.audits[0] assert!(
out.audits[0]
.detail .detail
.as_deref() .as_deref()
.unwrap_or("") .unwrap_or("")
.contains("validated BGPsec router certificate")); .contains("validated BGPsec router certificate")
);
} }
#[test] #[test]
@ -5450,11 +5471,13 @@ authorityKeyIdentifier = keyid:always
assert!(out.children.is_empty()); assert!(out.children.is_empty());
assert_eq!(out.audits.len(), 1); assert_eq!(out.audits.len(), 1);
assert!(matches!(out.audits[0].result, AuditObjectResult::Skipped)); assert!(matches!(out.audits[0].result, AuditObjectResult::Skipped));
assert!(out.audits[0] assert!(
out.audits[0]
.detail .detail
.as_deref() .as_deref()
.unwrap_or("") .unwrap_or("")
.contains("not a CA resource certificate or BGPsec router certificate")); .contains("not a CA resource certificate or BGPsec router certificate")
);
} }
#[test] #[test]
@ -5498,11 +5521,13 @@ authorityKeyIdentifier = keyid:always
assert!(out.children.is_empty()); assert!(out.children.is_empty());
assert_eq!(out.audits.len(), 1); assert_eq!(out.audits.len(), 1);
assert!(matches!(out.audits[0].result, AuditObjectResult::Error)); assert!(matches!(out.audits[0].result, AuditObjectResult::Error));
assert!(out.audits[0] assert!(
out.audits[0]
.detail .detail
.as_deref() .as_deref()
.unwrap_or("") .unwrap_or("")
.contains("router certificate validation failed")); .contains("router certificate validation failed")
);
} }
#[test] #[test]
@ -5637,11 +5662,13 @@ authorityKeyIdentifier = keyid:always
assert!(out.children.is_empty()); assert!(out.children.is_empty());
assert_eq!(out.audits.len(), 1); assert_eq!(out.audits.len(), 1);
assert!(matches!(out.audits[0].result, AuditObjectResult::Error)); assert!(matches!(out.audits[0].result, AuditObjectResult::Error));
assert!(out.audits[0] assert!(
out.audits[0]
.detail .detail
.as_deref() .as_deref()
.unwrap_or("") .unwrap_or("")
.contains("issuer CA decode failed")); .contains("issuer CA decode failed")
);
} }
#[test] #[test]
@ -6187,8 +6214,8 @@ authorityKeyIdentifier = keyid:always
} }
#[test] #[test]
fn build_publication_point_audit_from_vcir_uses_vcir_metadata_and_overlays_child_and_object_audits( fn build_publication_point_audit_from_vcir_uses_vcir_metadata_and_overlays_child_and_object_audits()
) { {
let now = time::OffsetDateTime::now_utc(); let now = time::OffsetDateTime::now_utc();
let child_cert_hash = sha256_hex(b"child-cert"); let child_cert_hash = sha256_hex(b"child-cert");
let mut vcir = sample_vcir_for_projection(now, &child_cert_hash); let mut vcir = sample_vcir_for_projection(now, &child_cert_hash);
@ -6434,22 +6461,27 @@ authorityKeyIdentifier = keyid:always
.expect("reconstruct pack with partial related artifacts"); .expect("reconstruct pack with partial related artifacts");
assert_eq!(pack.manifest_bytes, manifest_bytes); assert_eq!(pack.manifest_bytes, manifest_bytes);
assert_eq!(pack.files.len(), 3, "crl + child cert + roa only"); assert_eq!(pack.files.len(), 3, "crl + child cert + roa only");
assert!(pack assert!(
.files pack.files
.iter() .iter()
.any(|file| file.rsync_uri.ends_with("issuer.crl"))); .any(|file| file.rsync_uri.ends_with("issuer.crl"))
);
assert!(pack assert!(
.files pack.files
.iter() .iter()
.any(|file| file.rsync_uri.ends_with("child.cer"))); .any(|file| file.rsync_uri.ends_with("child.cer"))
);
assert!(pack assert!(
.files pack.files
.iter() .iter()
.any(|file| file.rsync_uri.ends_with("a.roa"))); .any(|file| file.rsync_uri.ends_with("a.roa"))
);
assert!(!pack assert!(
!pack
.files .files
.iter() .iter()
.any(|file| file.rsync_uri.ends_with("issuer.cer"))); .any(|file| file.rsync_uri.ends_with("issuer.cer"))
);
assert!(warnings.iter().any(|warning| { assert!(warnings.iter().any(|warning| {
warning warning
.message .message

View File

@ -1,5 +1,7 @@
use rpki::ccr::{ use rpki::ccr::{
compute_state_hash, decode_content_info, dump_content_info_json_value, CcrContentInfo, CcrDigestAlgorithm, ManifestInstance, ManifestState, RoaPayloadSet,
RoaPayloadState, RouterKey, RouterKeySet, RouterKeyState, TrustAnchorState, compute_state_hash,
decode_content_info, dump_content_info_json_value,
encode::{ encode::{
encode_aspa_payload_state_payload_der, encode_content_info, encode_aspa_payload_state_payload_der, encode_content_info,
encode_manifest_state_payload_der, encode_roa_payload_state_payload_der, encode_manifest_state_payload_der, encode_roa_payload_state_payload_der,
@ -8,8 +10,6 @@ use rpki::ccr::{
verify::{ verify::{
verify_against_report_json_path, verify_against_vcir_store, verify_content_info_bytes, verify_against_report_json_path, verify_against_vcir_store, verify_content_info_bytes,
}, },
CcrContentInfo, CcrDigestAlgorithm, ManifestInstance, ManifestState, RoaPayloadSet,
RoaPayloadState, RouterKey, RouterKeySet, RouterKeyState, TrustAnchorState,
}; };
use rpki::data_model::common::BigUnsigned; use rpki::data_model::common::BigUnsigned;
use rpki::storage::{ use rpki::storage::{

View File

@ -1,4 +1,3 @@
use std::collections::BTreeSet;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
@ -15,7 +14,7 @@ fn skip_heavy_blackbox_test() -> bool {
} }
#[test] #[test]
fn cir_full_and_delta_pair_reuses_shared_static_pool() { fn cir_full_and_delta_pair_reuses_shared_repo_bytes_db() {
if skip_heavy_blackbox_test() { if skip_heavy_blackbox_test() {
return; return;
} }
@ -25,8 +24,6 @@ fn cir_full_and_delta_pair_reuses_shared_static_pool() {
let out = out_dir.path().join("cir-pair"); let out = out_dir.path().join("cir-pair");
let fixture_root = out_dir.path().join("fixture"); let fixture_root = out_dir.path().join("fixture");
std::fs::create_dir_all(&fixture_root).unwrap(); std::fs::create_dir_all(&fixture_root).unwrap();
let static_payload_root = fixture_root.join("payloads");
std::fs::create_dir_all(&static_payload_root).unwrap();
let base_locks = fixture_root.join("base-locks.json"); let base_locks = fixture_root.join("base-locks.json");
let delta_locks = fixture_root.join("locks-delta.json"); let delta_locks = fixture_root.join("locks-delta.json");
std::fs::write(&base_locks, br#"{"validationTime":"2026-03-16T11:49:15Z"}"#).unwrap(); std::fs::write(&base_locks, br#"{"validationTime":"2026-03-16T11:49:15Z"}"#).unwrap();
@ -130,7 +127,7 @@ MODE=""
cir="" cir=""
ccr="" ccr=""
report="" report=""
static_root="" repo_bytes_db=""
while [[ $# -gt 0 ]]; do while [[ $# -gt 0 ]]; do
case "$1" in case "$1" in
--payload-replay-archive) MODE="full"; shift 2 ;; --payload-replay-archive) MODE="full"; shift 2 ;;
@ -138,22 +135,19 @@ while [[ $# -gt 0 ]]; do
--cir-out) cir="$2"; shift 2 ;; --cir-out) cir="$2"; shift 2 ;;
--ccr-out) ccr="$2"; shift 2 ;; --ccr-out) ccr="$2"; shift 2 ;;
--report-json) report="$2"; shift 2 ;; --report-json) report="$2"; shift 2 ;;
--cir-static-root) static_root="$2"; shift 2 ;; --repo-bytes-db) repo_bytes_db="$2"; shift 2 ;;
*) shift ;; *) shift ;;
esac esac
done done
mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$static_root/20260316/{{ab,cd,ef}}/00" mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$repo_bytes_db"
if [[ "$MODE" == "full" ]]; then if [[ "$MODE" == "full" ]]; then
cp "{full_cir}" "$cir" cp "{full_cir}" "$cir"
cp "{full_ccr}" "$ccr" cp "{full_ccr}" "$ccr"
cp "{full_report}" "$report" cp "{full_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
else else
cp "{delta_cir}" "$cir" cp "{delta_cir}" "$cir"
cp "{delta_ccr}" "$ccr" cp "{delta_ccr}" "$ccr"
cp "{delta_report}" "$report" cp "{delta_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
install -D -m 0644 "{payload_root}/delta-object" "$static_root/20260316/ef/00/{delta_hash}"
fi fi
"#, "#,
full_cir = full_cir_path.display(), full_cir = full_cir_path.display(),
@ -162,15 +156,10 @@ fi
delta_ccr = delta_ccr_path.display(), delta_ccr = delta_ccr_path.display(),
full_report = full_report_path.display(), full_report = full_report_path.display(),
delta_report = delta_report_path.display(), delta_report = delta_report_path.display(),
payload_root = static_payload_root.display(),
full_hash = full_obj_hash,
delta_hash = delta_obj_hash,
), ),
) )
.unwrap(); .unwrap();
std::fs::set_permissions(&stub, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap(); std::fs::set_permissions(&stub, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();
std::fs::write(static_payload_root.join("full-object"), b"full-object").unwrap();
std::fs::write(static_payload_root.join("delta-object"), b"delta-object").unwrap();
let proc = Command::new(script) let proc = Command::new(script)
.args([ .args([
@ -222,30 +211,13 @@ fi
rpki::cir::decode_cir(&std::fs::read(out.join("delta-001").join("input.cir")).unwrap()) rpki::cir::decode_cir(&std::fs::read(out.join("delta-001").join("input.cir")).unwrap())
.expect("decode delta cir"); .expect("decode delta cir");
let mut hashes = BTreeSet::new(); assert!(!full_cir.objects.is_empty());
for item in &full_cir.objects { assert!(!delta_cir.objects.is_empty());
hashes.insert(hex::encode(&item.sha256));
}
for item in &delta_cir.objects {
hashes.insert(hex::encode(&item.sha256));
}
let static_file_count = walk(out.join("static")).len(); let summary: serde_json::Value =
assert_eq!(static_file_count, hashes.len()); serde_json::from_slice(&std::fs::read(out.join("summary.json")).unwrap()).unwrap();
assert_eq!(summary["repoBytesDbPath"], "repo-bytes.db");
assert!(out.join("summary.json").is_file()); assert_eq!(summary["repoBytesDbExists"], true);
assert!(out.join("full").join("result.ccr").is_file()); assert!(out.join("full").join("result.ccr").is_file());
assert!(out.join("delta-001").join("result.ccr").is_file()); assert!(out.join("delta-001").join("result.ccr").is_file());
} }
fn walk(path: std::path::PathBuf) -> Vec<std::path::PathBuf> {
let mut out = Vec::new();
if path.is_file() {
out.push(path);
} else if path.is_dir() {
for entry in std::fs::read_dir(path).unwrap() {
out.extend(walk(entry.unwrap().path()));
}
}
out
}

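Note on the assertions above: with the shared static pool gone, the test no longer counts materialized files on disk; it only checks that both decoded CIRs are non-empty and that summary.json names repo-bytes.db. If a stricter cross-check is ever wanted, the hash set that the removed walk()-based count was compared against can still be rebuilt from the CIRs alone. A minimal sketch reusing only the types and fields visible in the removed code; the helper name referenced_hashes is illustrative:

    use std::collections::BTreeSet;
    use rpki::cir::CanonicalInputRepresentation;

    // Hex SHA-256 of every object referenced by the given CIRs; this is the
    // same set the old static-pool file count was asserted against.
    fn referenced_hashes(cirs: &[&CanonicalInputRepresentation]) -> BTreeSet<String> {
        let mut hashes = BTreeSet::new();
        for cir in cirs {
            for item in &cir.objects {
                hashes.insert(hex::encode(&item.sha256));
            }
        }
        hashes
    }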
View File

@ -1,6 +1,7 @@
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use rpki::blob_store::ExternalRepoBytesDb;
use rpki::ccr::{ use rpki::ccr::{
CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation, TrustAnchorState, CcrContentInfo, CcrDigestAlgorithm, RpkiCanonicalCacheRepresentation, TrustAnchorState,
encode_content_info, encode_content_info,
@ -15,7 +16,7 @@ fn cir_drop_report_counts_dropped_roa_objects_and_vrps() {
let cir_path = td.path().join("input.cir"); let cir_path = td.path().join("input.cir");
let ccr_path = td.path().join("output.ccr"); let ccr_path = td.path().join("output.ccr");
let report_path = td.path().join("report.json"); let report_path = td.path().join("report.json");
let static_root = td.path().join("static"); let repo_bytes_db = td.path().join("repo-bytes.db");
let json_out = td.path().join("drop.json"); let json_out = td.path().join("drop.json");
let md_out = td.path().join("drop.md"); let md_out = td.path().join("drop.md");
@ -26,12 +27,10 @@ fn cir_drop_report_counts_dropped_roa_objects_and_vrps() {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
hex::encode(Sha256::digest(&roa_bytes)) hex::encode(Sha256::digest(&roa_bytes))
}; };
let dir = static_root ExternalRepoBytesDb::open(&repo_bytes_db)
.join("20260409") .expect("open repo bytes")
.join(&hash[0..2]) .put_blob_bytes_batch(&[(hash.clone(), roa_bytes.clone())])
.join(&hash[2..4]); .expect("write repo bytes");
std::fs::create_dir_all(&dir).unwrap();
std::fs::write(dir.join(&hash), &roa_bytes).unwrap();
let cir = CanonicalInputRepresentation { let cir = CanonicalInputRepresentation {
version: CIR_VERSION_V1, version: CIR_VERSION_V1,
@ -96,8 +95,8 @@ fn cir_drop_report_counts_dropped_roa_objects_and_vrps() {
ccr_path.to_string_lossy().as_ref(), ccr_path.to_string_lossy().as_ref(),
"--report-json", "--report-json",
report_path.to_string_lossy().as_ref(), report_path.to_string_lossy().as_ref(),
"--static-root", "--repo-bytes-db",
static_root.to_string_lossy().as_ref(), repo_bytes_db.to_string_lossy().as_ref(),
"--json-out", "--json-out",
json_out.to_string_lossy().as_ref(), json_out.to_string_lossy().as_ref(),
"--md-out", "--md-out",

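Note: the block above is the direct replacement for the old date/prefix-sharded static tree (static/&lt;date&gt;/&lt;h0..2&gt;/&lt;h2..4&gt;/&lt;hash&gt;); the object is now written once into repo-bytes.db keyed by its hex SHA-256, and the replay tests further down factor the same two calls into a write_repo_bytes helper. A minimal seeding sketch using only calls visible in this diff; the function name seed_repo_bytes and the expect-based error handling are illustrative:

    use rpki::blob_store::ExternalRepoBytesDb;
    use sha2::{Digest, Sha256};

    // Store one object in the shared repo-bytes.db, keyed by its hex SHA-256.
    fn seed_repo_bytes(db_path: &std::path::Path, bytes: &[u8]) -> String {
        let hash = hex::encode(Sha256::digest(bytes));
        ExternalRepoBytesDb::open(db_path)
            .expect("open repo bytes")
            .put_blob_bytes_batch(&[(hash.clone(), bytes.to_vec())])
            .expect("write repo bytes");
        hash
    }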
View File

@ -1,9 +1,10 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
use rpki::blob_store::ExternalRepoBytesDb;
use rpki::cir::{ use rpki::cir::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir, CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
materialize_cir, materialize_cir_from_repo_bytes,
}; };
fn skip_heavy_script_replay_test() -> bool { fn skip_heavy_script_replay_test() -> bool {
@ -55,12 +56,13 @@ fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
) )
} }
fn write_static(root: &Path, date: &str, bytes: &[u8]) { fn write_repo_bytes(repo_bytes_db: &Path, bytes: &[u8]) {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
let hash = hex::encode(Sha256::digest(bytes)); let hash = hex::encode(Sha256::digest(bytes));
let dir = root.join(date).join(&hash[0..2]).join(&hash[2..4]); ExternalRepoBytesDb::open(repo_bytes_db)
std::fs::create_dir_all(&dir).expect("mkdir static"); .expect("open repo bytes")
std::fs::write(dir.join(hash), bytes).expect("write static object"); .put_blob_bytes_batch(&[(hash, bytes.to_vec())])
.expect("write repo bytes");
} }
fn prepare_reference_ccr( fn prepare_reference_ccr(
@ -120,15 +122,15 @@ fn cir_replay_matrix_script_matches_reference_for_all_participants() {
} }
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let static_root = td.path().join("static"); let repo_bytes_db = td.path().join("repo-bytes.db");
let cir_path = td.path().join("sample.cir"); let cir_path = td.path().join("sample.cir");
let mirror_root = td.path().join("mirror"); let mirror_root = td.path().join("mirror");
let out_dir = td.path().join("matrix-out"); let out_dir = td.path().join("matrix-out");
let (cir, ta_bytes) = build_ta_only_cir(); let (cir, ta_bytes) = build_ta_only_cir();
std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir"); std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
write_static(&static_root, "20260407", &ta_bytes); write_repo_bytes(&repo_bytes_db, &ta_bytes);
materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize"); materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).expect("materialize");
let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root); let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root);
let script = let script =
@ -146,8 +148,8 @@ fn cir_replay_matrix_script_matches_reference_for_all_participants() {
.args([ .args([
"--cir", "--cir",
cir_path.to_string_lossy().as_ref(), cir_path.to_string_lossy().as_ref(),
"--static-root", "--repo-bytes-db",
static_root.to_string_lossy().as_ref(), repo_bytes_db.to_string_lossy().as_ref(),
"--out-dir", "--out-dir",
out_dir.to_string_lossy().as_ref(), out_dir.to_string_lossy().as_ref(),
"--reference-ccr", "--reference-ccr",

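Note on the API swap made throughout these replay tests: only the storage argument of the materialize call changes, while the CIR, the mirror root, and the trailing boolean are passed through unchanged. A before/after sketch, read from this diff alone:

    // before this commit (static-root backend, removed):
    // materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize");
    // after this commit (shared repo-bytes.db backend):
    materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).expect("materialize");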
View File

@ -1,9 +1,10 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
use rpki::blob_store::ExternalRepoBytesDb;
use rpki::cir::{ use rpki::cir::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir, CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
materialize_cir, materialize_cir_from_repo_bytes,
}; };
fn skip_heavy_script_replay_test() -> bool { fn skip_heavy_script_replay_test() -> bool {
@ -55,12 +56,13 @@ fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
) )
} }
fn write_static(root: &Path, date: &str, bytes: &[u8]) { fn write_repo_bytes(repo_bytes_db: &Path, bytes: &[u8]) {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
let hash = hex::encode(Sha256::digest(bytes)); let hash = hex::encode(Sha256::digest(bytes));
let dir = root.join(date).join(&hash[0..2]).join(&hash[2..4]); ExternalRepoBytesDb::open(repo_bytes_db)
std::fs::create_dir_all(&dir).expect("mkdir static"); .expect("open repo bytes")
std::fs::write(dir.join(hash), bytes).expect("write static object"); .put_blob_bytes_batch(&[(hash, bytes.to_vec())])
.expect("write repo bytes");
} }
fn prepare_reference_ccr( fn prepare_reference_ccr(
@ -118,15 +120,15 @@ fn cir_routinator_script_matches_reference_on_ta_only_cir() {
return; return;
} }
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let static_root = td.path().join("static"); let repo_bytes_db = td.path().join("repo-bytes.db");
let cir_path = td.path().join("sample.cir"); let cir_path = td.path().join("sample.cir");
let mirror_root = td.path().join("mirror"); let mirror_root = td.path().join("mirror");
let out_dir = td.path().join("routinator-out"); let out_dir = td.path().join("routinator-out");
let (cir, ta_bytes) = build_ta_only_cir(); let (cir, ta_bytes) = build_ta_only_cir();
std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir"); std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
write_static(&static_root, "20260407", &ta_bytes); write_repo_bytes(&repo_bytes_db, &ta_bytes);
materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize"); materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).expect("materialize");
let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root); let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root);
let script = let script =
@ -144,8 +146,8 @@ fn cir_routinator_script_matches_reference_on_ta_only_cir() {
.args([ .args([
"--cir", "--cir",
cir_path.to_string_lossy().as_ref(), cir_path.to_string_lossy().as_ref(),
"--static-root", "--repo-bytes-db",
static_root.to_string_lossy().as_ref(), repo_bytes_db.to_string_lossy().as_ref(),
"--out-dir", "--out-dir",
out_dir.to_string_lossy().as_ref(), out_dir.to_string_lossy().as_ref(),
"--reference-ccr", "--reference-ccr",
@ -177,15 +179,15 @@ fn cir_rpki_client_script_matches_reference_on_ta_only_cir() {
return; return;
} }
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let static_root = td.path().join("static"); let repo_bytes_db = td.path().join("repo-bytes.db");
let cir_path = td.path().join("sample.cir"); let cir_path = td.path().join("sample.cir");
let mirror_root = td.path().join("mirror"); let mirror_root = td.path().join("mirror");
let out_dir = td.path().join("rpki-client-out"); let out_dir = td.path().join("rpki-client-out");
let (cir, ta_bytes) = build_ta_only_cir(); let (cir, ta_bytes) = build_ta_only_cir();
std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir"); std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
write_static(&static_root, "20260407", &ta_bytes); write_repo_bytes(&repo_bytes_db, &ta_bytes);
materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize"); materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).expect("materialize");
let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root); let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root);
let script = let script =
@ -203,8 +205,8 @@ fn cir_rpki_client_script_matches_reference_on_ta_only_cir() {
.args([ .args([
"--cir", "--cir",
cir_path.to_string_lossy().as_ref(), cir_path.to_string_lossy().as_ref(),
"--static-root", "--repo-bytes-db",
static_root.to_string_lossy().as_ref(), repo_bytes_db.to_string_lossy().as_ref(),
"--out-dir", "--out-dir",
out_dir.to_string_lossy().as_ref(), out_dir.to_string_lossy().as_ref(),
"--reference-ccr", "--reference-ccr",

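All three participant scripts (ours, routinator, rpki-client) are now driven through the same flag set. A condensed sketch of the invocation as these tests assemble it; the diff hunks end before the command is executed, so the .status() call and the success assertion below are illustrative:

    use std::process::Command;

    // Run one participant replay script against the shared repo-bytes.db.
    let status = Command::new(&script)
        .args([
            "--cir", cir_path.to_string_lossy().as_ref(),
            "--repo-bytes-db", repo_bytes_db.to_string_lossy().as_ref(),
            "--out-dir", out_dir.to_string_lossy().as_ref(),
            "--reference-ccr", reference_ccr.to_string_lossy().as_ref(),
        ])
        .status()
        .expect("run replay script");
    assert!(status.success());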
View File

@ -132,7 +132,7 @@ MODE=""
cir="" cir=""
ccr="" ccr=""
report="" report=""
static_root="" repo_bytes_db=""
while [[ $# -gt 0 ]]; do while [[ $# -gt 0 ]]; do
case "$1" in case "$1" in
--payload-replay-archive) MODE="full"; shift 2 ;; --payload-replay-archive) MODE="full"; shift 2 ;;
@ -140,16 +140,15 @@ while [[ $# -gt 0 ]]; do
--cir-out) cir="$2"; shift 2 ;; --cir-out) cir="$2"; shift 2 ;;
--ccr-out) ccr="$2"; shift 2 ;; --ccr-out) ccr="$2"; shift 2 ;;
--report-json) report="$2"; shift 2 ;; --report-json) report="$2"; shift 2 ;;
--cir-static-root) static_root="$2"; shift 2 ;; --repo-bytes-db) repo_bytes_db="$2"; shift 2 ;;
*) shift ;; *) shift ;;
esac esac
done done
mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$static_root/20260316/{{ab,cd,ef}}/00" mkdir -p "$(dirname "$cir")" "$(dirname "$ccr")" "$(dirname "$report")" "$repo_bytes_db"
if [[ "$MODE" == "full" ]]; then if [[ "$MODE" == "full" ]]; then
cp "{full_cir}" "$cir" cp "{full_cir}" "$cir"
cp "{full_ccr}" "$ccr" cp "{full_ccr}" "$ccr"
cp "{full_report}" "$report" cp "{full_report}" "$report"
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
else else
if [[ "$cir" == *delta-001* ]]; then if [[ "$cir" == *delta-001* ]]; then
cp "{delta_cir}" "$cir" cp "{delta_cir}" "$cir"
@ -160,8 +159,6 @@ else
cp "{delta_ccr}" "$ccr" cp "{delta_ccr}" "$ccr"
cp "{delta_report}" "$report" cp "{delta_report}" "$report"
fi fi
install -D -m 0644 "{payload_root}/full-object" "$static_root/20260316/ab/cd/{full_hash}"
install -D -m 0644 "{payload_root}/delta-object" "$static_root/20260316/ef/00/{delta_hash}"
fi fi
"#, "#,
full_cir = full_cir_path.display(), full_cir = full_cir_path.display(),
@ -170,9 +167,6 @@ fi
delta_ccr = delta_ccr_path.display(), delta_ccr = delta_ccr_path.display(),
full_report = full_report_path.display(), full_report = full_report_path.display(),
delta_report = delta_report_path.display(), delta_report = delta_report_path.display(),
payload_root = static_payload_root.display(),
full_hash = full_hash,
delta_hash = delta_hash,
), ),
) )
.unwrap(); .unwrap();

View File

@ -1,9 +1,10 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
use rpki::blob_store::ExternalRepoBytesDb;
use rpki::cir::{ use rpki::cir::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir, CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
materialize_cir, materialize_cir_from_repo_bytes,
}; };
fn skip_heavy_script_replay_test() -> bool { fn skip_heavy_script_replay_test() -> bool {
@ -55,12 +56,13 @@ fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
) )
} }
fn write_static(root: &Path, date: &str, bytes: &[u8]) { fn write_repo_bytes(repo_bytes_db: &Path, bytes: &[u8]) {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
let hash = hex::encode(Sha256::digest(bytes)); let hash = hex::encode(Sha256::digest(bytes));
let dir = root.join(date).join(&hash[0..2]).join(&hash[2..4]); ExternalRepoBytesDb::open(repo_bytes_db)
std::fs::create_dir_all(&dir).expect("mkdir static"); .expect("open repo bytes")
std::fs::write(dir.join(hash), bytes).expect("write static object"); .put_blob_bytes_batch(&[(hash, bytes.to_vec())])
.expect("write repo bytes");
} }
fn prepare_reference_ccr( fn prepare_reference_ccr(
@ -110,7 +112,7 @@ fn prepare_reference_ccr(
fn prepare_sequence_root(td: &Path) -> PathBuf { fn prepare_sequence_root(td: &Path) -> PathBuf {
let sequence_root = td.join("sequence"); let sequence_root = td.join("sequence");
let static_root = sequence_root.join("static"); let repo_bytes_db = sequence_root.join("repo-bytes.db");
let mirror_root = td.join("mirror"); let mirror_root = td.join("mirror");
std::fs::create_dir_all(sequence_root.join("full")).unwrap(); std::fs::create_dir_all(sequence_root.join("full")).unwrap();
std::fs::create_dir_all(sequence_root.join("delta-001")).unwrap(); std::fs::create_dir_all(sequence_root.join("delta-001")).unwrap();
@ -129,8 +131,8 @@ fn prepare_sequence_root(td: &Path) -> PathBuf {
&cir_bytes, &cir_bytes,
) )
.unwrap(); .unwrap();
write_static(&static_root, "20260407", &ta_bytes); write_repo_bytes(&repo_bytes_db, &ta_bytes);
materialize_cir(&cir, &static_root, &mirror_root, true).unwrap(); materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).unwrap();
let reference = prepare_reference_ccr(td, &cir, &mirror_root); let reference = prepare_reference_ccr(td, &cir, &mirror_root);
std::fs::copy(&reference, sequence_root.join("full").join("result.ccr")).unwrap(); std::fs::copy(&reference, sequence_root.join("full").join("result.ccr")).unwrap();
std::fs::copy( std::fs::copy(
@ -148,7 +150,7 @@ fn prepare_sequence_root(td: &Path) -> PathBuf {
std::fs::write(sequence_root.join("delta-002").join("report.json"), b"{}").unwrap(); std::fs::write(sequence_root.join("delta-002").join("report.json"), b"{}").unwrap();
let sequence = serde_json::json!({ let sequence = serde_json::json!({
"version": 1, "version": 1,
"staticRoot": "static", "repoBytesDbPath": "repo-bytes.db",
"steps": [ "steps": [
{"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null}, {"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null},
{"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"}, {"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"},

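The sequence manifest consumed by the replay harness now names the shared database rather than a static root. For readability, the json! literal built above, condensed to the keys visible in this hunk (any further steps are truncated in the hunk and omitted here):

    let sequence = serde_json::json!({
        "version": 1,
        // replaces the removed "staticRoot": "static" entry
        "repoBytesDbPath": "repo-bytes.db",
        "steps": [
            {"stepId": "full", "kind": "full", "validationTime": "2026-04-07T00:00:00Z",
             "cirPath": "full/input.cir", "ccrPath": "full/result.ccr",
             "reportPath": "full/report.json", "previousStepId": null},
            {"stepId": "delta-001", "kind": "delta", "validationTime": "2026-04-07T00:00:00Z",
             "cirPath": "delta-001/input.cir", "ccrPath": "delta-001/result.ccr",
             "reportPath": "delta-001/report.json", "previousStepId": "full"}
        ]
    });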
View File

@ -1,9 +1,10 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
use rpki::blob_store::ExternalRepoBytesDb;
use rpki::cir::{ use rpki::cir::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir, CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
materialize_cir, materialize_cir_from_repo_bytes,
}; };
fn skip_heavy_script_replay_test() -> bool { fn skip_heavy_script_replay_test() -> bool {
@ -55,12 +56,13 @@ fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
) )
} }
fn write_static(root: &Path, date: &str, bytes: &[u8]) { fn write_repo_bytes(repo_bytes_db: &Path, bytes: &[u8]) {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
let hash = hex::encode(Sha256::digest(bytes)); let hash = hex::encode(Sha256::digest(bytes));
let dir = root.join(date).join(&hash[0..2]).join(&hash[2..4]); ExternalRepoBytesDb::open(repo_bytes_db)
std::fs::create_dir_all(&dir).expect("mkdir static"); .expect("open repo bytes")
std::fs::write(dir.join(hash), bytes).expect("write static object"); .put_blob_bytes_batch(&[(hash, bytes.to_vec())])
.expect("write repo bytes");
} }
fn prepare_reference_ccr( fn prepare_reference_ccr(
@ -118,7 +120,7 @@ fn ours_sequence_replay_script_replays_all_steps() {
} }
let td = tempfile::tempdir().expect("tempdir"); let td = tempfile::tempdir().expect("tempdir");
let sequence_root = td.path().join("sequence"); let sequence_root = td.path().join("sequence");
let static_root = sequence_root.join("static"); let repo_bytes_db = sequence_root.join("repo-bytes.db");
let mirror_root = td.path().join("mirror"); let mirror_root = td.path().join("mirror");
std::fs::create_dir_all(sequence_root.join("full")).unwrap(); std::fs::create_dir_all(sequence_root.join("full")).unwrap();
std::fs::create_dir_all(sequence_root.join("delta-001")).unwrap(); std::fs::create_dir_all(sequence_root.join("delta-001")).unwrap();
@ -137,8 +139,8 @@ fn ours_sequence_replay_script_replays_all_steps() {
&cir_bytes, &cir_bytes,
) )
.unwrap(); .unwrap();
write_static(&static_root, "20260407", &ta_bytes); write_repo_bytes(&repo_bytes_db, &ta_bytes);
materialize_cir(&cir, &static_root, &mirror_root, true).unwrap(); materialize_cir_from_repo_bytes(&cir, &repo_bytes_db, &mirror_root, true).unwrap();
let reference = prepare_reference_ccr(td.path(), &cir, &mirror_root); let reference = prepare_reference_ccr(td.path(), &cir, &mirror_root);
std::fs::copy(&reference, sequence_root.join("full").join("result.ccr")).unwrap(); std::fs::copy(&reference, sequence_root.join("full").join("result.ccr")).unwrap();
std::fs::copy( std::fs::copy(
@ -157,7 +159,7 @@ fn ours_sequence_replay_script_replays_all_steps() {
let sequence = serde_json::json!({ let sequence = serde_json::json!({
"version": 1, "version": 1,
"staticRoot": "static", "repoBytesDbPath": "repo-bytes.db",
"steps": [ "steps": [
{"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null}, {"stepId":"full","kind":"full","validationTime":"2026-04-07T00:00:00Z","cirPath":"full/input.cir","ccrPath":"full/result.ccr","reportPath":"full/report.json","previousStepId":null},
{"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"}, {"stepId":"delta-001","kind":"delta","validationTime":"2026-04-07T00:00:00Z","cirPath":"delta-001/input.cir","ccrPath":"delta-001/result.ccr","reportPath":"delta-001/report.json","previousStepId":"full"},

View File

@ -10,7 +10,7 @@ use rpki::storage::{
VcirArtifactValidationStatus, VcirAuditSummary, VcirCcrManifestProjection, VcirInstanceGate, VcirArtifactValidationStatus, VcirAuditSummary, VcirCcrManifestProjection, VcirInstanceGate,
VcirRelatedArtifact, VcirSummary, VcirRelatedArtifact, VcirSummary,
}; };
use rpki::validation::manifest::{process_manifest_publication_point, PublicationPointSource}; use rpki::validation::manifest::{PublicationPointSource, process_manifest_publication_point};
fn issuer_ca_fixture() -> Vec<u8> { fn issuer_ca_fixture() -> Vec<u8> {
std::fs::read( std::fs::read(

View File

@ -11,8 +11,8 @@ use rpki::storage::{
VcirRelatedArtifact, VcirSummary, VcirRelatedArtifact, VcirSummary,
}; };
use rpki::validation::manifest::{ use rpki::validation::manifest::{
process_manifest_publication_point, process_manifest_publication_point_after_repo_sync, ManifestProcessError, PublicationPointSource, process_manifest_publication_point,
ManifestProcessError, PublicationPointSource, process_manifest_publication_point_after_repo_sync,
}; };
fn issuer_ca_fixture_der() -> Vec<u8> { fn issuer_ca_fixture_der() -> Vec<u8> {

View File

@ -2,9 +2,9 @@ use rpki::parallel::config::ParallelPhase2Config;
use rpki::policy::{Policy, SignedObjectFailurePolicy}; use rpki::policy::{Policy, SignedObjectFailurePolicy};
use rpki::storage::{PackFile, PackTime}; use rpki::storage::{PackFile, PackTime};
use rpki::validation::objects::{ use rpki::validation::objects::{
process_publication_point_for_issuer_parallel_roa_with_pool, ParallelRoaWorkerPool, process_publication_point_for_issuer_parallel_roa_with_pool,
process_publication_point_snapshot_for_issuer, process_publication_point_snapshot_for_issuer,
process_publication_point_snapshot_for_issuer_parallel_roa, ParallelRoaWorkerPool, process_publication_point_snapshot_for_issuer_parallel_roa,
}; };
use rpki::validation::publication_point::PublicationPointSnapshot; use rpki::validation::publication_point::PublicationPointSnapshot;