20260407 & 20260408 基于cir 三方replay对齐,并且materialize 使用hard link优化

This commit is contained in:
yuyr 2026-04-08 16:27:46 +08:00
parent 34fb9657f1
commit c9ef5aaf4c
36 changed files with 8743 additions and 31 deletions

56
scripts/cir/README.md Normal file
View File

@ -0,0 +1,56 @@
# CIR Scripts
## `cir-rsync-wrapper`
一个用于 CIR 黑盒 replay 的 rsync wrapper。
### 环境变量
- `REAL_RSYNC_BIN`
- 真实 rsync 二进制路径
- 默认优先 `/usr/bin/rsync`
- `CIR_MIRROR_ROOT`
- 本地镜像树根目录
- 当命令行中出现 `rsync://...` source 时必须设置
### 语义
- 仅改写 `rsync://host/path` 类型参数
- 其它参数原样透传给真实 rsync
- 改写目标:
- `rsync://example.net/repo/a.roa`
- →
- `<CIR_MIRROR_ROOT>/example.net/repo/a.roa`
### 兼容目标
- Routinator `--rsync-command`
- `rpki-client -e rsync_prog`
## 其它脚本
- `run_cir_replay_ours.sh`
- `run_cir_replay_routinator.sh`
- `run_cir_replay_rpki_client.sh`
- `run_cir_replay_matrix.sh`
## `cir-local-link-sync.py`
`CIR_LOCAL_LINK_MODE=1` 且 wrapper 检测到 source 已经被改写为本地 mirror 路径时,
wrapper 不再调用真实 `rsync`,而是调用这个 helper 完成:
- `hardlink` 优先的本地树同步
- 失败时回退到 copy
- 支持 `--delete`
`run_cir_replay_matrix.sh` 会顺序执行:
- `ours`
- Routinator
- `rpki-client`
并汇总生成:
- `summary.json`
- `summary.md`
- `detail.md`

View File

@ -0,0 +1,136 @@
#!/usr/bin/env python3
import argparse
import errno
import os
import shutil
from pathlib import Path
def _same_inode(src: Path, dst: Path) -> bool:
try:
src_stat = src.stat()
dst_stat = dst.stat()
except FileNotFoundError:
return False
return (src_stat.st_dev, src_stat.st_ino) == (dst_stat.st_dev, dst_stat.st_ino)
def _remove_path(path: Path) -> None:
if not path.exists() and not path.is_symlink():
return
if path.is_dir() and not path.is_symlink():
shutil.rmtree(path)
else:
path.unlink()
def _prune_empty_dirs(root: Path) -> None:
if not root.exists():
return
for path in sorted((p for p in root.rglob("*") if p.is_dir()), key=lambda p: len(p.parts), reverse=True):
try:
path.rmdir()
except OSError:
pass
def _link_or_copy(src: Path, dst: Path) -> str:
    """Materialize *src* at *dst*, preferring a hardlink over a copy.

    Returns one of:
      - "reused": dst already shares src's inode, nothing to do;
      - "linked": a fresh hardlink was created;
      - "copied": linking failed for a tolerable reason and copy2 was used.
    """
    dst.parent.mkdir(parents=True, exist_ok=True)
    if dst.is_symlink() or dst.exists():
        if _same_inode(src, dst):
            return "reused"
        _remove_path(dst)
    try:
        os.link(src, dst)
    except OSError as err:
        # Cross-device, permission, link-count or unsupported-FS failures fall
        # back to a metadata-preserving copy; anything else is a real error.
        if err.errno in (errno.EXDEV, errno.EPERM, errno.EMLINK, errno.ENOTSUP, errno.EACCES):
            shutil.copy2(src, dst)
            return "copied"
        raise
    return "linked"
def _file_map(src_arg: str, dest_arg: str) -> tuple[Path, dict[str, Path]]:
src = Path(src_arg.rstrip(os.sep))
if not src.exists():
raise FileNotFoundError(src)
mapping: dict[str, Path] = {}
if src.is_dir():
copy_contents = src_arg.endswith(os.sep)
if copy_contents:
root = src
for path in root.rglob("*"):
if path.is_file():
mapping[path.relative_to(root).as_posix()] = path
else:
root = src
base = src.name
for path in root.rglob("*"):
if path.is_file():
rel = Path(base) / path.relative_to(root)
mapping[rel.as_posix()] = path
else:
dest_path = Path(dest_arg)
if dest_arg.endswith(os.sep) or dest_path.is_dir():
mapping[src.name] = src
else:
mapping[dest_path.name] = src
return Path(dest_arg), mapping
def sync_local_tree(src_arg: str, dst_arg: str, delete: bool) -> dict[str, int]:
    """Sync one source into the destination tree using hardlinks when possible.

    When *delete* is set, destination files that are not part of the plan are
    removed first and emptied directories pruned (rsync --delete semantics).
    Returns counters: files / linked / copied / reused / deleted.
    """
    dst_root, mapping = _file_map(src_arg, dst_arg)
    dst_root.mkdir(parents=True, exist_ok=True)
    expected = {dst_root / rel for rel in mapping}
    deleted = 0
    if delete and dst_root.exists():
        # Deepest entries first so files vanish before their parents are pruned.
        stale = sorted(dst_root.rglob("*"), key=lambda p: len(p.parts), reverse=True)
        for path in stale:
            if not path.is_dir() and path not in expected:
                _remove_path(path)
                deleted += 1
        _prune_empty_dirs(dst_root)
    counts = {"linked": 0, "copied": 0, "reused": 0}
    for rel, src in mapping.items():
        counts[_link_or_copy(src, dst_root / rel)] += 1
    return {
        "files": len(mapping),
        "linked": counts["linked"],
        "copied": counts["copied"],
        "reused": counts["reused"],
        "deleted": deleted,
    }
def main() -> int:
    """CLI entry point: parse source/dest (+ --delete), sync, print a summary line."""
    parser = argparse.ArgumentParser(
        description="Sync a local CIR mirror tree using hardlinks when possible."
    )
    parser.add_argument("--delete", action="store_true", help="Delete target files not present in source")
    parser.add_argument("source")
    parser.add_argument("dest")
    args = parser.parse_args()
    stats = sync_local_tree(args.source, args.dest, args.delete)
    template = (
        "local-link-sync files={files} linked={linked} copied={copied}"
        " reused={reused} deleted={deleted}"
    )
    print(template.format(**stats))
    return 0
if __name__ == "__main__":
raise SystemExit(main())

127
scripts/cir/cir-rsync-wrapper Executable file
View File

@ -0,0 +1,127 @@
#!/usr/bin/env python3
import os
import shutil
import sys
from pathlib import Path
from urllib.parse import urlparse
def real_rsync_bin() -> str:
    """Resolve the real rsync binary to exec.

    Order: $REAL_RSYNC_BIN override, then /usr/bin/rsync if present, then a
    PATH lookup; exits with an error message when nothing is found.
    """
    override = os.environ.get("REAL_RSYNC_BIN")
    if override:
        return override
    default = "/usr/bin/rsync"
    if Path(default).exists():
        return default
    discovered = shutil.which("rsync")
    if discovered:
        return discovered
    raise SystemExit("cir-rsync-wrapper: REAL_RSYNC_BIN is not set and rsync was not found")
def rewrite_arg(arg: str, mirror_root: str | None) -> str:
if not arg.startswith("rsync://"):
return arg
if not mirror_root:
raise SystemExit(
"cir-rsync-wrapper: CIR_MIRROR_ROOT is required when an rsync:// source is present"
)
parsed = urlparse(arg)
if parsed.scheme != "rsync" or not parsed.hostname:
raise SystemExit(f"cir-rsync-wrapper: invalid rsync URI: {arg}")
path = parsed.path.lstrip("/")
local = Path(mirror_root).resolve() / parsed.hostname
if path:
local = local / path
local_str = str(local)
if local.exists() and local.is_dir() and not local_str.endswith("/"):
local_str += "/"
elif arg.endswith("/") and not local_str.endswith("/"):
local_str += "/"
return local_str
def filter_args(args: list[str]) -> list[str]:
    """Rewrite rsync:// arguments onto the mirror and drop network-only flags.

    ``--address`` and ``--contimeout`` (both split and ``=`` forms) make no
    sense against a local tree, so they are stripped whenever any rsync://
    argument is present; everything else is rewritten via rewrite_arg.
    """
    mirror_root = os.environ.get("CIR_MIRROR_ROOT")
    has_remote = any(a.startswith("rsync://") for a in args)
    out: list[str] = []
    i = 0
    while i < len(args):
        arg = args[i]
        if has_remote:
            if arg in ("--address", "--contimeout"):
                i += 2  # flag consumes a separate value
                continue
            if arg.startswith(("--address=", "--contimeout=")):
                i += 1
                continue
        out.append(rewrite_arg(arg, mirror_root))
        i += 1
    return out
def local_link_mode_enabled() -> bool:
    """True when $CIR_LOCAL_LINK_MODE holds a truthy marker (1/true/yes/on)."""
    flag = os.environ.get("CIR_LOCAL_LINK_MODE", "").lower()
    return flag in {"1", "true", "yes", "on"}
def extract_source_and_dest(args: list[str]) -> tuple[str, str]:
    """Pick the (source, dest) positionals out of an rsync argv.

    Long options known to consume a separate value are skipped together with
    that value; every other dash-prefixed token (including ``--opt=value``
    forms) is skipped alone. The last two remaining positionals are returned;
    fewer than two is a usage error.
    """
    expects_value = {
        "--timeout",
        "--min-size",
        "--max-size",
        "--include",
        "--exclude",
        "--compare-dest",
    }
    positionals: list[str] = []
    skip_value = False
    for arg in args:
        if skip_value:
            skip_value = False
            continue
        if arg in expects_value:
            skip_value = True
            continue
        if arg.startswith("-"):
            # Covers both plain flags and --opt=value forms.
            continue
        positionals.append(arg)
    if len(positionals) < 2:
        raise SystemExit("cir-rsync-wrapper: expected source and destination arguments")
    return positionals[-2], positionals[-1]
def maybe_exec_local_link_sync(args: list[str], rewritten_any: bool) -> None:
    """In local-link mode, replace this process with the hardlink-sync helper.

    Fires only when an rsync:// source was rewritten AND CIR_LOCAL_LINK_MODE
    is enabled; otherwise returns so the caller execs the real rsync. The
    source must already be a local mirror path at this point.
    """
    if not (rewritten_any and local_link_mode_enabled()):
        return
    source, dest = extract_source_and_dest(args)
    if source.startswith("rsync://"):
        raise SystemExit("cir-rsync-wrapper: expected rewritten local source for CIR_LOCAL_LINK_MODE")
    helper = Path(__file__).with_name("cir-local-link-sync.py")
    cmd = [sys.executable, str(helper)]
    if "--delete" in args:
        cmd.append("--delete")
    cmd.extend([source, dest])
    # execv never returns on success: the wrapper becomes the helper process.
    os.execv(sys.executable, cmd)
def main() -> int:
    """Rewrite argv, divert to the local-link helper when enabled, else exec rsync."""
    raw_args = sys.argv[1:]
    had_remote = any(a.startswith("rsync://") for a in raw_args)
    final_args = filter_args(raw_args)
    maybe_exec_local_link_sync(final_args, had_remote)
    rsync = real_rsync_bin()
    os.execv(rsync, [rsync, *final_args])
    return 127  # unreachable: execv replaces the process on success
if __name__ == "__main__":
raise SystemExit(main())

50
scripts/cir/json_to_vaps_csv.py Executable file
View File

@ -0,0 +1,50 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import csv
import json
from pathlib import Path
def normalize_asn(value: str | int) -> str:
text = str(value).strip().upper()
if text.startswith("AS"):
text = text[2:]
return f"AS{int(text)}"
def main() -> int:
    """Convert the ASPA records of a JSON VRP dump into a sorted VAP CSV.

    Reads --input (JSON with an "aspas" array), normalizes customer and
    provider ASNs, and writes --csv-out with columns
    Customer ASN / Providers / Trust Anchor, sorted for stable diffing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", required=True, type=Path)
    parser.add_argument("--csv-out", required=True, type=Path)
    args = parser.parse_args()
    payload = json.loads(args.input.read_text(encoding="utf-8"))
    rows: list[tuple[str, str, str]] = []
    for record in payload.get("aspas", []):
        # De-duplicate providers, then order numerically by ASN.
        provider_set = {normalize_asn(item) for item in record.get("providers", [])}
        providers = sorted(provider_set, key=lambda s: int(s[2:]))
        customer = normalize_asn(record["customer"])
        trust_anchor = str(record.get("ta", "")).strip().lower()
        rows.append((customer, ";".join(providers), trust_anchor))
    rows.sort(key=lambda row: (int(row[0][2:]), row[1], row[2]))
    args.csv_out.parent.mkdir(parents=True, exist_ok=True)
    with args.csv_out.open("w", encoding="utf-8", newline="") as fh:
        writer = csv.writer(fh)
        writer.writerow(["Customer ASN", "Providers", "Trust Anchor"])
        writer.writerows(rows)
    print(args.csv_out)
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@ -0,0 +1,286 @@
#!/usr/bin/env bash
# Run the three-way CIR replay matrix (ours / Routinator / rpki-client)
# against a single CIR input, timing each participant, then aggregate the
# per-run compare summaries into summary.json, summary.md and detail.md.
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_matrix.sh \
    --cir <path> \
    --static-root <path> \
    --out-dir <path> \
    --reference-ccr <path> \
    --rpki-client-build-dir <path> \
    [--keep-db] \
    [--rpki-bin <path>] \
    [--routinator-root <path>] \
    [--routinator-bin <path>] \
    [--real-rsync-bin <path>]
EOF
}

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
CIR=""
STATIC_ROOT=""
OUT_DIR=""
REFERENCE_CCR=""
RPKI_CLIENT_BUILD_DIR=""
KEEP_DB=0
# Defaults; overridable via environment or the flags below.
RPKI_BIN="${RPKI_BIN:-$ROOT_DIR/target/release/rpki}"
ROUTINATOR_ROOT="${ROUTINATOR_ROOT:-/home/yuyr/dev/rust_playground/routinator}"
ROUTINATOR_BIN="${ROUTINATOR_BIN:-$ROUTINATOR_ROOT/target/debug/routinator}"
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
OURS_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_ours.sh"
ROUTINATOR_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_routinator.sh"
RPKI_CLIENT_SCRIPT="$ROOT_DIR/scripts/cir/run_cir_replay_rpki_client.sh"

# Flag parsing.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --cir) CIR="$2"; shift 2 ;;
    --static-root) STATIC_ROOT="$2"; shift 2 ;;
    --out-dir) OUT_DIR="$2"; shift 2 ;;
    --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
    --rpki-client-build-dir) RPKI_CLIENT_BUILD_DIR="$2"; shift 2 ;;
    --keep-db) KEEP_DB=1; shift ;;
    --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
    --routinator-root) ROUTINATOR_ROOT="$2"; shift 2 ;;
    --routinator-bin) ROUTINATOR_BIN="$2"; shift 2 ;;
    --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

# All five required arguments must be present.
[[ -n "$CIR" && -n "$STATIC_ROOT" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" && -n "$RPKI_CLIENT_BUILD_DIR" ]] || {
  usage >&2
  exit 2
}
mkdir -p "$OUT_DIR"

# run_with_timing <summary.json> <timing.json> <cmd...>
# Runs <cmd...>, then writes <timing.json> with its exit code, wall-clock
# duration in ms, and (when present) the participant's compare summary.
# Returns the wrapped command's exit status.
run_with_timing() {
  local summary_path="$1"
  local timing_path="$2"
  shift 2
  local start end status
  start="$(python3 - <<'PY'
import time
print(time.perf_counter_ns())
PY
)"
  if "$@"; then
    status=0
  else
    status=$?
  fi
  end="$(python3 - <<'PY'
import time
print(time.perf_counter_ns())
PY
)"
  python3 - <<'PY' "$summary_path" "$timing_path" "$status" "$start" "$end"
import json, sys
summary_path, timing_path, status, start, end = sys.argv[1:]
duration_ms = max(0, (int(end) - int(start)) // 1_000_000)
data = {"exitCode": int(status), "durationMs": duration_ms}
try:
    with open(summary_path, "r", encoding="utf-8") as f:
        data["compare"] = json.load(f)
except FileNotFoundError:
    data["compare"] = None
with open(timing_path, "w", encoding="utf-8") as f:
    json.dump(data, f, indent=2)
PY
  return "$status"
}

OURS_OUT="$OUT_DIR/ours"
ROUTINATOR_OUT="$OUT_DIR/routinator"
RPKI_CLIENT_OUT="$OUT_DIR/rpki-client"
mkdir -p "$OURS_OUT" "$ROUTINATOR_OUT" "$RPKI_CLIENT_OUT"

# Per-participant replay command lines.
ours_cmd=(
  "$OURS_SCRIPT"
  --cir "$CIR"
  --static-root "$STATIC_ROOT"
  --out-dir "$OURS_OUT"
  --reference-ccr "$REFERENCE_CCR"
  --rpki-bin "$RPKI_BIN"
  --real-rsync-bin "$REAL_RSYNC_BIN"
)
routinator_cmd=(
  "$ROUTINATOR_SCRIPT"
  --cir "$CIR"
  --static-root "$STATIC_ROOT"
  --out-dir "$ROUTINATOR_OUT"
  --reference-ccr "$REFERENCE_CCR"
  --routinator-root "$ROUTINATOR_ROOT"
  --routinator-bin "$ROUTINATOR_BIN"
  --real-rsync-bin "$REAL_RSYNC_BIN"
)
rpki_client_cmd=(
  "$RPKI_CLIENT_SCRIPT"
  --cir "$CIR"
  --static-root "$STATIC_ROOT"
  --out-dir "$RPKI_CLIENT_OUT"
  --reference-ccr "$REFERENCE_CCR"
  --build-dir "$RPKI_CLIENT_BUILD_DIR"
  --real-rsync-bin "$REAL_RSYNC_BIN"
)
if [[ "$KEEP_DB" -eq 1 ]]; then
  ours_cmd+=(--keep-db)
  routinator_cmd+=(--keep-db)
  rpki_client_cmd+=(--keep-db)
fi

# Execute the three participants sequentially; remember each status so results
# are still aggregated before the overall run fails.
ours_status=0
routinator_status=0
rpki_client_status=0
if run_with_timing "$OURS_OUT/compare-summary.json" "$OURS_OUT/timing.json" "${ours_cmd[@]}"; then
  :
else
  ours_status=$?
fi
if run_with_timing "$ROUTINATOR_OUT/compare-summary.json" "$ROUTINATOR_OUT/timing.json" "${routinator_cmd[@]}"; then
  :
else
  routinator_status=$?
fi
if run_with_timing "$RPKI_CLIENT_OUT/compare-summary.json" "$RPKI_CLIENT_OUT/timing.json" "${rpki_client_cmd[@]}"; then
  :
else
  rpki_client_status=$?
fi

SUMMARY_JSON="$OUT_DIR/summary.json"
SUMMARY_MD="$OUT_DIR/summary.md"
DETAIL_MD="$OUT_DIR/detail.md"

# Aggregate the three timing.json files into machine- and human-readable reports.
python3 - <<'PY' \
  "$CIR" \
  "$STATIC_ROOT" \
  "$REFERENCE_CCR" \
  "$OURS_OUT" \
  "$ROUTINATOR_OUT" \
  "$RPKI_CLIENT_OUT" \
  "$SUMMARY_JSON" \
  "$SUMMARY_MD" \
  "$DETAIL_MD"
import json
import sys
from pathlib import Path
cir_path, static_root, reference_ccr, ours_out, routinator_out, rpki_client_out, summary_json, summary_md, detail_md = sys.argv[1:]
participants = []
all_match = True
for name, out_dir in [
    ("ours", ours_out),
    ("routinator", routinator_out),
    ("rpki-client", rpki_client_out),
]:
    out = Path(out_dir)
    timing = json.loads((out / "timing.json").read_text(encoding="utf-8"))
    compare = timing.get("compare") or {}
    vrps = compare.get("vrps") or {}
    vaps = compare.get("vaps") or {}
    participant = {
        "name": name,
        "outDir": str(out),
        "tmpRoot": str(out / ".tmp"),
        "mirrorPath": str(out / ".tmp" / "mirror"),
        "timingPath": str(out / "timing.json"),
        "summaryPath": str(out / "compare-summary.json"),
        "exitCode": timing["exitCode"],
        "durationMs": timing["durationMs"],
        "vrps": vrps,
        "vaps": vaps,
        "match": bool(vrps.get("match")) and bool(vaps.get("match")) and timing["exitCode"] == 0,
        "logPaths": [str(path) for path in sorted(out.glob("*.log"))],
    }
    participants.append(participant)
    all_match = all_match and participant["match"]
summary = {
    "cirPath": cir_path,
    "staticRoot": static_root,
    "referenceCcr": reference_ccr,
    "participants": participants,
    "allMatch": all_match,
}
Path(summary_json).write_text(json.dumps(summary, indent=2), encoding="utf-8")
lines = [
    "# CIR Replay Matrix Summary",
    "",
    f"- `cir`: `{cir_path}`",
    f"- `static_root`: `{static_root}`",
    f"- `reference_ccr`: `{reference_ccr}`",
    f"- `all_match`: `{all_match}`",
    "",
    "| Participant | Exit | Duration (ms) | VRP actual/ref | VRP match | VAP actual/ref | VAP match | Log |",
    "| --- | ---: | ---: | --- | --- | --- | --- | --- |",
]
for participant in participants:
    vrps = participant["vrps"] or {}
    vaps = participant["vaps"] or {}
    log_path = participant["logPaths"][0] if participant["logPaths"] else ""
    lines.append(
        "| {name} | {exit_code} | {duration_ms} | {vrp_actual}/{vrp_ref} | {vrp_match} | {vap_actual}/{vap_ref} | {vap_match} | `{log_path}` |".format(
            name=participant["name"],
            exit_code=participant["exitCode"],
            duration_ms=participant["durationMs"],
            vrp_actual=vrps.get("actual", "-"),
            vrp_ref=vrps.get("reference", "-"),
            vrp_match=vrps.get("match", False),
            vap_actual=vaps.get("actual", "-"),
            vap_ref=vaps.get("reference", "-"),
            vap_match=vaps.get("match", False),
            log_path=log_path,
        )
    )
Path(summary_md).write_text("\n".join(lines) + "\n", encoding="utf-8")
detail_lines = [
    "# CIR Replay Matrix Detail",
    "",
]
for participant in participants:
    vrps = participant["vrps"] or {}
    vaps = participant["vaps"] or {}
    detail_lines.extend([
        f"## {participant['name']}",
        f"- `exit_code`: `{participant['exitCode']}`",
        f"- `duration_ms`: `{participant['durationMs']}`",
        f"- `out_dir`: `{participant['outDir']}`",
        f"- `tmp_root`: `{participant['tmpRoot']}`",
        f"- `mirror_path`: `{participant['mirrorPath']}`",
        f"- `summary_path`: `{participant['summaryPath']}`",
        f"- `timing_path`: `{participant['timingPath']}`",
        f"- `log_paths`: `{', '.join(participant['logPaths'])}`",
        f"- `vrps`: `actual={vrps.get('actual', '-')}` `reference={vrps.get('reference', '-')}` `match={vrps.get('match', False)}`",
        f"- `vaps`: `actual={vaps.get('actual', '-')}` `reference={vaps.get('reference', '-')}` `match={vaps.get('match', False)}`",
        f"- `vrps.only_in_actual`: `{vrps.get('only_in_actual', [])}`",
        f"- `vrps.only_in_reference`: `{vrps.get('only_in_reference', [])}`",
        f"- `vaps.only_in_actual`: `{vaps.get('only_in_actual', [])}`",
        f"- `vaps.only_in_reference`: `{vaps.get('only_in_reference', [])}`",
        "",
    ])
Path(detail_md).write_text("\n".join(detail_lines), encoding="utf-8")
PY

# Fail if any participant failed outright...
if [[ "$ours_status" -ne 0 || "$routinator_status" -ne 0 || "$rpki_client_status" -ne 0 ]]; then
  exit 1
fi
# ...or if any of the three diverged from the reference.
all_match="$(python3 - <<'PY' "$SUMMARY_JSON"
import json,sys
print("true" if json.load(open(sys.argv[1]))["allMatch"] else "false")
PY
)"
if [[ "$all_match" != "true" ]]; then
  exit 1
fi
echo "done: $OUT_DIR"

View File

@ -0,0 +1,150 @@
#!/usr/bin/env bash
# Replay one CIR with our validator: extract TALs + metadata, materialize the
# local mirror from the CIR, run the rpki binary offline through the
# cir-rsync-wrapper (local-link mode), then compare the produced CCR's
# VRP/VAP views against the reference CCR.
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_ours.sh \
    --cir <path> \
    --static-root <path> \
    --out-dir <path> \
    --reference-ccr <path> \
    [--keep-db] \
    [--rpki-bin <path>] \
    [--real-rsync-bin <path>]
EOF
}

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
CIR=""
STATIC_ROOT=""
OUT_DIR=""
REFERENCE_CCR=""
KEEP_DB=0
RPKI_BIN="$ROOT_DIR/target/release/rpki"
CIR_MATERIALIZE_BIN="$ROOT_DIR/target/release/cir_materialize"
CIR_EXTRACT_INPUTS_BIN="$ROOT_DIR/target/release/cir_extract_inputs"
CCR_TO_COMPARE_VIEWS_BIN="$ROOT_DIR/target/release/ccr_to_compare_views"
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"

# Flag parsing.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --cir) CIR="$2"; shift 2 ;;
    --static-root) STATIC_ROOT="$2"; shift 2 ;;
    --out-dir) OUT_DIR="$2"; shift 2 ;;
    --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
    --keep-db) KEEP_DB=1; shift ;;
    --rpki-bin) RPKI_BIN="$2"; shift 2 ;;
    --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

# All four required arguments must be present.
[[ -n "$CIR" && -n "$STATIC_ROOT" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
  usage >&2
  exit 2
}
mkdir -p "$OUT_DIR"

# Build the helper binaries on demand.
if [[ ! -x "$RPKI_BIN" || ! -x "$CIR_MATERIALIZE_BIN" || ! -x "$CIR_EXTRACT_INPUTS_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin rpki --bin cir_materialize --bin cir_extract_inputs --bin ccr_to_compare_views
  )
fi

# Scratch / output layout.
TMP_ROOT="$OUT_DIR/.tmp"
TALS_DIR="$TMP_ROOT/tals"
META_JSON="$TMP_ROOT/meta.json"
MIRROR_ROOT="$TMP_ROOT/mirror"
DB_DIR="$TMP_ROOT/work-db"
ACTUAL_CCR="$OUT_DIR/actual.ccr"
ACTUAL_REPORT="$OUT_DIR/report.json"
ACTUAL_VRPS="$OUT_DIR/actual-vrps.csv"
ACTUAL_VAPS="$OUT_DIR/actual-vaps.csv"
REF_VRPS="$OUT_DIR/reference-vrps.csv"
REF_VAPS="$OUT_DIR/reference-vaps.csv"
COMPARE_JSON="$OUT_DIR/compare-summary.json"
RUN_LOG="$OUT_DIR/run.log"
rm -rf "$TMP_ROOT"
mkdir -p "$TMP_ROOT"

# Extract TALs + metadata from the CIR, then materialize the local mirror tree.
"$CIR_EXTRACT_INPUTS_BIN" --cir "$CIR" --tals-dir "$TALS_DIR" --meta-json "$META_JSON"
materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --static-root "$STATIC_ROOT" --mirror-root "$MIRROR_ROOT")
if [[ "$KEEP_DB" -eq 1 ]]; then
  materialize_cmd+=(--keep-db)
fi
"${materialize_cmd[@]}"

# Pull the pinned validation time and the first TAL path out of meta.json.
VALIDATION_TIME="$(python3 - <<'PY' "$META_JSON"
import json,sys
print(json.load(open(sys.argv[1]))["validationTime"])
PY
)"
FIRST_TAL="$(python3 - <<'PY' "$META_JSON"
import json,sys
print(json.load(open(sys.argv[1]))["talFiles"][0]["path"])
PY
)"
# Wrapper environment: absolute mirror root, real rsync, local-link mode on.
export CIR_MIRROR_ROOT="$(python3 - <<'PY' "$MIRROR_ROOT"
from pathlib import Path
import sys
print(Path(sys.argv[1]).resolve())
PY
)"
export REAL_RSYNC_BIN="$REAL_RSYNC_BIN"
export CIR_LOCAL_LINK_MODE=1

# Run our validator offline against the mirror via the rsync wrapper.
"$RPKI_BIN" \
  --db "$DB_DIR" \
  --tal-path "$FIRST_TAL" \
  --disable-rrdp \
  --rsync-command "$WRAPPER" \
  --validation-time "$VALIDATION_TIME" \
  --ccr-out "$ACTUAL_CCR" \
  --report-json "$ACTUAL_REPORT" \
  >"$RUN_LOG" 2>&1

# Project both CCRs into comparable VRP/VAP CSV views.
"$CCR_TO_COMPARE_VIEWS_BIN" --ccr "$ACTUAL_CCR" --vrps-out "$ACTUAL_VRPS" --vaps-out "$ACTUAL_VAPS" --trust-anchor unknown
"$CCR_TO_COMPARE_VIEWS_BIN" --ccr "$REFERENCE_CCR" --vrps-out "$REF_VRPS" --vaps-out "$REF_VAPS" --trust-anchor unknown

# Set-diff actual vs reference CSV rows into compare-summary.json.
python3 - <<'PY' "$ACTUAL_VRPS" "$REF_VRPS" "$ACTUAL_VAPS" "$REF_VAPS" "$COMPARE_JSON"
import csv, json, sys
def rows(path):
    with open(path, newline="") as f:
        return list(csv.reader(f))[1:]
actual_vrps = {tuple(r) for r in rows(sys.argv[1])}
ref_vrps = {tuple(r) for r in rows(sys.argv[2])}
actual_vaps = {tuple(r) for r in rows(sys.argv[3])}
ref_vaps = {tuple(r) for r in rows(sys.argv[4])}
summary = {
    "vrps": {
        "actual": len(actual_vrps),
        "reference": len(ref_vrps),
        "only_in_actual": sorted(actual_vrps - ref_vrps)[:20],
        "only_in_reference": sorted(ref_vrps - actual_vrps)[:20],
        "match": actual_vrps == ref_vrps,
    },
    "vaps": {
        "actual": len(actual_vaps),
        "reference": len(ref_vaps),
        "only_in_actual": sorted(actual_vaps - ref_vaps)[:20],
        "only_in_reference": sorted(ref_vaps - actual_vaps)[:20],
        "match": actual_vaps == ref_vaps,
    }
}
with open(sys.argv[5], "w") as f:
    json.dump(summary, f, indent=2)
PY

# Keep the scratch tree only when --keep-db was requested.
if [[ "$KEEP_DB" -ne 1 ]]; then
  rm -rf "$TMP_ROOT"
fi
echo "done: $OUT_DIR"

View File

@ -0,0 +1,209 @@
#!/usr/bin/env bash
# Replay one CIR with Routinator: extract TALs/metadata, materialize the local
# mirror, run Routinator offline under libfaketime (clock pinned to the CIR's
# validation time) through the cir-rsync-wrapper, then compare its VRP/VAP
# output against the reference CCR's views.
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_routinator.sh \
    --cir <path> \
    --static-root <path> \
    --out-dir <path> \
    --reference-ccr <path> \
    [--keep-db] \
    [--routinator-root <path>] \
    [--routinator-bin <path>] \
    [--real-rsync-bin <path>]
EOF
}

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
RPKI_DEV_ROOT="${RPKI_DEV_ROOT:-$ROOT_DIR}"
CIR=""
STATIC_ROOT=""
OUT_DIR=""
REFERENCE_CCR=""
KEEP_DB=0
ROUTINATOR_ROOT="${ROUTINATOR_ROOT:-/home/yuyr/dev/rust_playground/routinator}"
ROUTINATOR_BIN="${ROUTINATOR_BIN:-$ROUTINATOR_ROOT/target/debug/routinator}"
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
CIR_MATERIALIZE_BIN="$ROOT_DIR/target/release/cir_materialize"
CIR_EXTRACT_INPUTS_BIN="$ROOT_DIR/target/release/cir_extract_inputs"
CCR_TO_COMPARE_VIEWS_BIN="$ROOT_DIR/target/release/ccr_to_compare_views"
WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"
JSON_TO_VAPS="$ROOT_DIR/scripts/cir/json_to_vaps_csv.py"
FAKETIME_LIB="${FAKETIME_LIB:-$ROOT_DIR/target/tools/faketime_pkg/extracted/libfaketime/usr/lib/x86_64-linux-gnu/faketime/libfaketime.so.1}"

# Flag parsing.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --cir) CIR="$2"; shift 2 ;;
    --static-root) STATIC_ROOT="$2"; shift 2 ;;
    --out-dir) OUT_DIR="$2"; shift 2 ;;
    --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
    --keep-db) KEEP_DB=1; shift ;;
    --routinator-root) ROUTINATOR_ROOT="$2"; shift 2 ;;
    --routinator-bin) ROUTINATOR_BIN="$2"; shift 2 ;;
    --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

[[ -n "$CIR" && -n "$STATIC_ROOT" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" ]] || {
  usage >&2
  exit 2
}
mkdir -p "$OUT_DIR"

# Build helper binaries on demand.
if [[ ! -x "$CIR_MATERIALIZE_BIN" || ! -x "$CIR_EXTRACT_INPUTS_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin cir_materialize --bin cir_extract_inputs --bin ccr_to_compare_views
  )
fi

# Scratch / output layout.
TMP_ROOT="$OUT_DIR/.tmp"
TALS_DIR="$TMP_ROOT/tals"
META_JSON="$TMP_ROOT/meta.json"
MIRROR_ROOT="$TMP_ROOT/mirror"
WORK_REPO="$TMP_ROOT/repository"
RUN_LOG="$OUT_DIR/routinator.log"
ACTUAL_VRPS="$OUT_DIR/actual-vrps.csv"
ACTUAL_VAPS_JSON="$OUT_DIR/actual-vaps.json"
ACTUAL_VAPS="$OUT_DIR/actual-vaps.csv"
REF_VRPS="$OUT_DIR/reference-vrps.csv"
REF_VAPS="$OUT_DIR/reference-vaps.csv"
SUMMARY_JSON="$OUT_DIR/compare-summary.json"
rm -rf "$TMP_ROOT"
mkdir -p "$TMP_ROOT"

"$CIR_EXTRACT_INPUTS_BIN" --cir "$CIR" --tals-dir "$TALS_DIR" --meta-json "$META_JSON"

# Normalize extracted TALs to the strict "URIs, blank line, base64" layout.
python3 - <<'PY' "$TALS_DIR"
from pathlib import Path
import sys
for tal in Path(sys.argv[1]).glob("*.tal"):
    lines = tal.read_text(encoding="utf-8").splitlines()
    rsync_uris = [line for line in lines if line.startswith("rsync://")]
    base64_lines = []
    seen_sep = False
    for line in lines:
        if seen_sep:
            if line.strip():
                base64_lines.append(line)
        elif line.strip() == "":
            seen_sep = True
    tal.write_text("\n".join(rsync_uris) + "\n\n" + "\n".join(base64_lines) + "\n", encoding="utf-8")
PY

materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --static-root "$STATIC_ROOT" --mirror-root "$MIRROR_ROOT")
if [[ "$KEEP_DB" -eq 1 ]]; then
  materialize_cmd+=(--keep-db)
fi
"${materialize_cmd[@]}"

# Validation time, first TAL, and TA name from the extracted metadata.
VALIDATION_TIME="$(python3 - <<'PY' "$META_JSON"
import json,sys
print(json.load(open(sys.argv[1]))["validationTime"])
PY
)"
FIRST_TAL="$(python3 - <<'PY' "$META_JSON"
import json,sys
print(json.load(open(sys.argv[1]))["talFiles"][0]["path"])
PY
)"
TA_NAME="$(basename "$FIRST_TAL" .tal)"
# Convert the ISO validation time to a UTC epoch for libfaketime.
FAKE_EPOCH="$(python3 - <<'PY' "$VALIDATION_TIME"
from datetime import datetime, timezone
import sys
dt = datetime.fromisoformat(sys.argv[1].replace("Z", "+00:00")).astimezone(timezone.utc)
print(int(dt.timestamp()))
PY
)"
# Wrapper environment: absolute mirror root, real rsync, local-link mode on.
export CIR_MIRROR_ROOT="$(python3 - <<'PY' "$MIRROR_ROOT"
from pathlib import Path
import sys
print(Path(sys.argv[1]).resolve())
PY
)"
export REAL_RSYNC_BIN="$REAL_RSYNC_BIN"
export CIR_LOCAL_LINK_MODE=1

# Pass 1: populate Routinator's repository from the mirror (update --complete).
env \
  LD_PRELOAD="$FAKETIME_LIB" \
  FAKETIME_FMT=%s \
  FAKETIME="$FAKE_EPOCH" \
  FAKETIME_DONT_FAKE_MONOTONIC=1 \
  "$ROUTINATOR_BIN" \
  --repository-dir "$WORK_REPO" \
  --disable-rrdp \
  --rsync-command "$WRAPPER" \
  --no-rir-tals \
  --extra-tals-dir "$TALS_DIR" \
  --enable-aspa \
  update --complete >"$RUN_LOG" 2>&1
# Pass 2: export the VRP CSV without refreshing.
env \
  LD_PRELOAD="$FAKETIME_LIB" \
  FAKETIME_FMT=%s \
  FAKETIME="$FAKE_EPOCH" \
  FAKETIME_DONT_FAKE_MONOTONIC=1 \
  "$ROUTINATOR_BIN" \
  --repository-dir "$WORK_REPO" \
  --disable-rrdp \
  --rsync-command "$WRAPPER" \
  --no-rir-tals \
  --extra-tals-dir "$TALS_DIR" \
  --enable-aspa \
  vrps --noupdate -o "$ACTUAL_VRPS" >>"$RUN_LOG" 2>&1
# Pass 3: export the JSON view (carries ASPA records) and derive the VAP CSV.
env \
  LD_PRELOAD="$FAKETIME_LIB" \
  FAKETIME_FMT=%s \
  FAKETIME="$FAKE_EPOCH" \
  FAKETIME_DONT_FAKE_MONOTONIC=1 \
  "$ROUTINATOR_BIN" \
  --repository-dir "$WORK_REPO" \
  --disable-rrdp \
  --rsync-command "$WRAPPER" \
  --no-rir-tals \
  --extra-tals-dir "$TALS_DIR" \
  --enable-aspa \
  vrps --noupdate --format json -o "$ACTUAL_VAPS_JSON" >>"$RUN_LOG" 2>&1
python3 "$JSON_TO_VAPS" --input "$ACTUAL_VAPS_JSON" --csv-out "$ACTUAL_VAPS"

# Reference views from the reference CCR, using the same TA name.
"$CCR_TO_COMPARE_VIEWS_BIN" --ccr "$REFERENCE_CCR" --vrps-out "$REF_VRPS" --vaps-out "$REF_VAPS" --trust-anchor "$TA_NAME"

# Set-diff actual vs reference CSV rows into compare-summary.json.
python3 - <<'PY' "$ACTUAL_VRPS" "$REF_VRPS" "$ACTUAL_VAPS" "$REF_VAPS" "$SUMMARY_JSON"
import csv, json, sys
def rows(path):
    with open(path, newline="") as f:
        return list(csv.reader(f))[1:]
actual_vrps = {tuple(r) for r in rows(sys.argv[1])}
ref_vrps = {tuple(r) for r in rows(sys.argv[2])}
actual_vaps = {tuple(r) for r in rows(sys.argv[3])}
ref_vaps = {tuple(r) for r in rows(sys.argv[4])}
summary = {
    "vrps": {
        "actual": len(actual_vrps),
        "reference": len(ref_vrps),
        "match": actual_vrps == ref_vrps,
        "only_in_actual": sorted(actual_vrps - ref_vrps)[:20],
        "only_in_reference": sorted(ref_vrps - actual_vrps)[:20],
    },
    "vaps": {
        "actual": len(actual_vaps),
        "reference": len(ref_vaps),
        "match": actual_vaps == ref_vaps,
        "only_in_actual": sorted(actual_vaps - ref_vaps)[:20],
        "only_in_reference": sorted(ref_vaps - actual_vaps)[:20],
    }
}
with open(sys.argv[5], "w") as f:
    json.dump(summary, f, indent=2)
PY

# Keep the scratch tree only when --keep-db was requested.
if [[ "$KEEP_DB" -ne 1 ]]; then
  rm -rf "$TMP_ROOT"
fi
echo "done: $OUT_DIR"

View File

@ -0,0 +1,179 @@
#!/usr/bin/env bash
# Replay one CIR with rpki-client: extract TALs/metadata, materialize the local
# mirror, run the built rpki-client offline (wall clock pinned via -P) through
# the cir-rsync-wrapper, then compare its CCR-derived VRP/VAP views against
# the reference CCR.
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  ./scripts/cir/run_cir_replay_rpki_client.sh \
    --cir <path> \
    --static-root <path> \
    --out-dir <path> \
    --reference-ccr <path> \
    --build-dir <path> \
    [--keep-db] \
    [--real-rsync-bin <path>]
EOF
}

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
CIR=""
STATIC_ROOT=""
OUT_DIR=""
REFERENCE_CCR=""
BUILD_DIR=""
KEEP_DB=0
REAL_RSYNC_BIN="${REAL_RSYNC_BIN:-/usr/bin/rsync}"
CIR_MATERIALIZE_BIN="$ROOT_DIR/target/release/cir_materialize"
CIR_EXTRACT_INPUTS_BIN="$ROOT_DIR/target/release/cir_extract_inputs"
CCR_TO_COMPARE_VIEWS_BIN="$ROOT_DIR/target/release/ccr_to_compare_views"
WRAPPER="$ROOT_DIR/scripts/cir/cir-rsync-wrapper"

# Flag parsing.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --cir) CIR="$2"; shift 2 ;;
    --static-root) STATIC_ROOT="$2"; shift 2 ;;
    --out-dir) OUT_DIR="$2"; shift 2 ;;
    --reference-ccr) REFERENCE_CCR="$2"; shift 2 ;;
    --build-dir) BUILD_DIR="$2"; shift 2 ;;
    --keep-db) KEEP_DB=1; shift ;;
    --real-rsync-bin) REAL_RSYNC_BIN="$2"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) echo "unknown argument: $1" >&2; usage; exit 2 ;;
  esac
done

[[ -n "$CIR" && -n "$STATIC_ROOT" && -n "$OUT_DIR" && -n "$REFERENCE_CCR" && -n "$BUILD_DIR" ]] || {
  usage >&2
  exit 2
}
mkdir -p "$OUT_DIR"

# Build helper binaries on demand.
if [[ ! -x "$CIR_MATERIALIZE_BIN" || ! -x "$CIR_EXTRACT_INPUTS_BIN" || ! -x "$CCR_TO_COMPARE_VIEWS_BIN" ]]; then
  (
    cd "$ROOT_DIR"
    cargo build --release --bin cir_materialize --bin cir_extract_inputs --bin ccr_to_compare_views
  )
fi

# Scratch / output layout.
TMP_ROOT="$OUT_DIR/.tmp"
TALS_DIR="$TMP_ROOT/tals"
META_JSON="$TMP_ROOT/meta.json"
MIRROR_ROOT="$TMP_ROOT/mirror"
CACHE_DIR="$TMP_ROOT/cache"
OUT_CCR_DIR="$TMP_ROOT/out"
RUN_LOG="$OUT_DIR/rpki-client.log"
ACTUAL_VRPS="$OUT_DIR/actual-vrps.csv"
ACTUAL_VAPS="$OUT_DIR/actual-vaps.csv"
ACTUAL_VAPS_META="$OUT_DIR/actual-vaps-meta.json"
ACTUAL_VRPS_META="$OUT_DIR/actual-vrps-meta.json"
REF_VRPS="$OUT_DIR/reference-vrps.csv"
REF_VAPS="$OUT_DIR/reference-vaps.csv"
SUMMARY_JSON="$OUT_DIR/compare-summary.json"
rm -rf "$TMP_ROOT"
mkdir -p "$TMP_ROOT"

"$CIR_EXTRACT_INPUTS_BIN" --cir "$CIR" --tals-dir "$TALS_DIR" --meta-json "$META_JSON"

# Normalize extracted TALs to the strict "URIs, blank line, base64" layout.
python3 - <<'PY' "$TALS_DIR"
from pathlib import Path
import sys
for tal in Path(sys.argv[1]).glob("*.tal"):
    lines = tal.read_text(encoding="utf-8").splitlines()
    rsync_uris = [line for line in lines if line.startswith("rsync://")]
    base64_lines = []
    seen_sep = False
    for line in lines:
        if seen_sep:
            if line.strip():
                base64_lines.append(line)
        elif line.strip() == "":
            seen_sep = True
    tal.write_text("\n".join(rsync_uris) + "\n\n" + "\n".join(base64_lines) + "\n", encoding="utf-8")
PY

materialize_cmd=("$CIR_MATERIALIZE_BIN" --cir "$CIR" --static-root "$STATIC_ROOT" --mirror-root "$MIRROR_ROOT")
if [[ "$KEEP_DB" -eq 1 ]]; then
  materialize_cmd+=(--keep-db)
fi
"${materialize_cmd[@]}"

# rpki-client pins its evaluation time via -P <epoch>.
VALIDATION_EPOCH="$(python3 - <<'PY' "$META_JSON"
from datetime import datetime, timezone
import json, sys
vt = json.load(open(sys.argv[1]))["validationTime"]
dt = datetime.fromisoformat(vt.replace("Z", "+00:00")).astimezone(timezone.utc)
print(int(dt.timestamp()))
PY
)"
FIRST_TAL="$(python3 - <<'PY' "$META_JSON"
import json,sys
print(json.load(open(sys.argv[1]))["talFiles"][0]["path"])
PY
)"
TA_NAME="$(basename "$FIRST_TAL" .tal)"
# Wrapper environment: absolute mirror root, real rsync, local-link mode on.
export CIR_MIRROR_ROOT="$(python3 - <<'PY' "$MIRROR_ROOT"
from pathlib import Path
import sys
print(Path(sys.argv[1]).resolve())
PY
)"
export REAL_RSYNC_BIN="$REAL_RSYNC_BIN"
export CIR_LOCAL_LINK_MODE=1
mkdir -p "$CACHE_DIR" "$OUT_CCR_DIR"

# Run rpki-client: -R rsync-only, -e wrapper as rsync program, -P pinned time.
"$BUILD_DIR/src/rpki-client" \
  -R \
  -e "$WRAPPER" \
  -P "$VALIDATION_EPOCH" \
  -t "$FIRST_TAL" \
  -d "$CACHE_DIR" \
  "$OUT_CCR_DIR" >"$RUN_LOG" 2>&1

# Derive VRP/VAP CSVs from the produced CCR using the build's test tools.
"$BUILD_DIR/tests/rpki-ccr-vrps" \
  --input "$OUT_CCR_DIR/rpki.ccr" \
  --ta "$TA_NAME" \
  --csv-out "$ACTUAL_VRPS" \
  --meta-out "$ACTUAL_VRPS_META"
"$BUILD_DIR/tests/rpki-ccr-vaps" \
  --input "$OUT_CCR_DIR/rpki.ccr" \
  --ta "$TA_NAME" \
  --csv-out "$ACTUAL_VAPS" \
  --meta-out "$ACTUAL_VAPS_META"

# Reference views from the reference CCR, using the same TA name.
"$CCR_TO_COMPARE_VIEWS_BIN" --ccr "$REFERENCE_CCR" --vrps-out "$REF_VRPS" --vaps-out "$REF_VAPS" --trust-anchor "$TA_NAME"

# Set-diff actual vs reference CSV rows into compare-summary.json.
python3 - <<'PY' "$ACTUAL_VRPS" "$REF_VRPS" "$ACTUAL_VAPS" "$REF_VAPS" "$SUMMARY_JSON"
import csv, json, sys
def rows(path):
    with open(path, newline="") as f:
        return list(csv.reader(f))[1:]
actual_vrps = {tuple(r) for r in rows(sys.argv[1])}
ref_vrps = {tuple(r) for r in rows(sys.argv[2])}
actual_vaps = {tuple(r) for r in rows(sys.argv[3])}
ref_vaps = {tuple(r) for r in rows(sys.argv[4])}
summary = {
    "vrps": {
        "actual": len(actual_vrps),
        "reference": len(ref_vrps),
        "match": actual_vrps == ref_vrps,
        "only_in_actual": sorted(actual_vrps - ref_vrps)[:20],
        "only_in_reference": sorted(ref_vrps - actual_vrps)[:20],
    },
    "vaps": {
        "actual": len(actual_vaps),
        "reference": len(ref_vaps),
        "match": actual_vaps == ref_vaps,
        "only_in_actual": sorted(actual_vaps - ref_vaps)[:20],
        "only_in_reference": sorted(ref_vaps - actual_vaps)[:20],
    }
}
with open(sys.argv[5], "w") as f:
    json.dump(summary, f, indent=2)
PY

# Keep the scratch tree only when --keep-db was requested.
if [[ "$KEEP_DB" -ne 1 ]]; then
  rm -rf "$TMP_ROOT"
fi
echo "done: $OUT_DIR"

View File

@ -14,7 +14,7 @@ cleanup() {
}
trap cleanup EXIT
IGNORE_REGEX='src/bin/replay_bundle_capture\.rs|src/bin/replay_bundle_capture_delta\.rs|src/bin/replay_bundle_capture_sequence\.rs|src/bundle/live_capture\.rs'
IGNORE_REGEX='src/bin/replay_bundle_capture\.rs|src/bin/replay_bundle_capture_delta\.rs|src/bin/replay_bundle_capture_sequence\.rs|src/bin/replay_bundle_record\.rs|src/bin/replay_bundle_refresh_sequence_outputs\.rs|src/bin/measure_sequence_replay\.rs|src/bin/repository_view_stats\.rs|src/bin/trace_arin_missing_vrps\.rs|src/bin/db_stats\.rs|src/bin/rrdp_state_dump\.rs|src/bin/ccr_dump\.rs|src/bin/ccr_verify\.rs|src/bin/ccr_to_routinator_csv\.rs|src/bin/ccr_to_compare_views\.rs|src/bin/cir_materialize\.rs|src/bin/cir_extract_inputs\.rs|src/bundle/live_capture\.rs|src/bundle/record_io\.rs|src/progress_log\.rs'
# Preserve colored output even though we post-process output by running under a pseudo-TTY.
# We run tests only once, then generate both CLI text + HTML reports without rerunning tests.

4034
specs/cir.excalidraw Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,79 @@
use std::path::PathBuf;
/// One-line usage banner shown for `--help` and argument errors.
fn usage() -> &'static str {
    concat!(
        "Usage: cir_extract_inputs --cir <path> ",
        "--tals-dir <path> --meta-json <path>"
    )
}
fn main() {
    // Delegate to `run`; any error is printed to stderr and exits with status 2.
    match run(std::env::args().collect()) {
        Ok(()) => {}
        Err(err) => {
            eprintln!("error: {err}");
            std::process::exit(2);
        }
    }
}
/// Decode a CIR file and extract its replay inputs: every embedded TAL is
/// written to `--tals-dir` as `tal-NNN.tal`, and a JSON metadata file with
/// the validation time plus the TAL index is written to `--meta-json`.
fn run(argv: Vec<String>) -> Result<(), String> {
    // Parsed option values; all three options are mandatory.
    let mut cir_path: Option<PathBuf> = None;
    let mut tals_dir: Option<PathBuf> = None;
    let mut meta_json: Option<PathBuf> = None;
    let mut args = argv.iter().skip(1);
    while let Some(arg) = args.next() {
        match arg.as_str() {
            "--help" | "-h" => return Err(usage().to_string()),
            "--cir" => {
                let value = args.next().ok_or("--cir requires a value")?;
                cir_path = Some(PathBuf::from(value));
            }
            "--tals-dir" => {
                let value = args.next().ok_or("--tals-dir requires a value")?;
                tals_dir = Some(PathBuf::from(value));
            }
            "--meta-json" => {
                let value = args.next().ok_or("--meta-json requires a value")?;
                meta_json = Some(PathBuf::from(value));
            }
            other => return Err(format!("unknown argument: {other}\n\n{}", usage())),
        }
    }
    let cir_path = cir_path.ok_or_else(|| format!("--cir is required\n\n{}", usage()))?;
    let tals_dir = tals_dir.ok_or_else(|| format!("--tals-dir is required\n\n{}", usage()))?;
    let meta_json = meta_json.ok_or_else(|| format!("--meta-json is required\n\n{}", usage()))?;

    // Decode the CIR container; it embeds both the TAL bytes and the validation time.
    let bytes = std::fs::read(&cir_path)
        .map_err(|e| format!("read CIR failed: {}: {e}", cir_path.display()))?;
    let cir = rpki::cir::decode_cir(&bytes).map_err(|e| e.to_string())?;

    // Write each embedded TAL and remember (uri, path) pairs for the metadata.
    std::fs::create_dir_all(&tals_dir)
        .map_err(|e| format!("create tals dir failed: {}: {e}", tals_dir.display()))?;
    let mut tal_files = Vec::new();
    for (idx, tal) in cir.tals.iter().enumerate() {
        let path = tals_dir.join(format!("tal-{:03}.tal", idx + 1));
        std::fs::write(&path, &tal.tal_bytes)
            .map_err(|e| format!("write TAL failed: {}: {e}", path.display()))?;
        tal_files.push(serde_json::json!({
            "talUri": tal.tal_uri,
            "path": path,
        }));
    }

    // RFC 3339 validation time plus the TAL index form the metadata document.
    let validation_time = cir
        .validation_time
        .format(&time::format_description::well_known::Rfc3339)
        .map_err(|e| format!("format validationTime failed: {e}"))?;
    let meta = serde_json::json!({
        "validationTime": validation_time,
        "talFiles": tal_files,
    });
    if let Some(parent) = meta_json.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("create meta parent failed: {}: {e}", parent.display()))?;
    }
    std::fs::write(&meta_json, serde_json::to_vec_pretty(&meta).unwrap())
        .map_err(|e| format!("write meta json failed: {}: {e}", meta_json.display()))?;
    Ok(())
}

View File

@ -0,0 +1,77 @@
use std::path::PathBuf;
/// One-line usage banner shown for `--help` and argument errors.
fn usage() -> &'static str {
    concat!(
        "Usage: cir_materialize --cir <path> ",
        "--static-root <path> --mirror-root <path> [--keep-db]"
    )
}
fn main() {
    // Delegate to `run`; any error is printed to stderr and exits with status 2.
    match run(std::env::args().collect()) {
        Ok(()) => {}
        Err(err) => {
            eprintln!("error: {err}");
            std::process::exit(2);
        }
    }
}
/// Materialize a CIR's objects from the static pool into a local mirror tree.
///
/// On failure the (possibly half-built) mirror tree is removed unless
/// `--keep-db` was passed.
fn run(argv: Vec<String>) -> Result<(), String> {
    let mut cir_path: Option<PathBuf> = None;
    let mut static_root: Option<PathBuf> = None;
    let mut mirror_root: Option<PathBuf> = None;
    let mut keep_db = false;
    let mut args = argv.iter().skip(1);
    while let Some(arg) = args.next() {
        match arg.as_str() {
            "--help" | "-h" => return Err(usage().to_string()),
            "--cir" => {
                let value = args.next().ok_or("--cir requires a value")?;
                cir_path = Some(PathBuf::from(value));
            }
            "--static-root" => {
                let value = args.next().ok_or("--static-root requires a value")?;
                static_root = Some(PathBuf::from(value));
            }
            "--mirror-root" => {
                let value = args.next().ok_or("--mirror-root requires a value")?;
                mirror_root = Some(PathBuf::from(value));
            }
            "--keep-db" => keep_db = true,
            other => return Err(format!("unknown argument: {other}\n\n{}", usage())),
        }
    }
    let cir_path = cir_path.ok_or_else(|| format!("--cir is required\n\n{}", usage()))?;
    let static_root =
        static_root.ok_or_else(|| format!("--static-root is required\n\n{}", usage()))?;
    let mirror_root =
        mirror_root.ok_or_else(|| format!("--mirror-root is required\n\n{}", usage()))?;

    let bytes = std::fs::read(&cir_path)
        .map_err(|e| format!("read CIR failed: {}: {e}", cir_path.display()))?;
    let cir = rpki::cir::decode_cir(&bytes).map_err(|e| e.to_string())?;

    // clean_rebuild = true: the mirror tree is rebuilt from scratch each run.
    match rpki::cir::materialize_cir(&cir, &static_root, &mirror_root, true) {
        Ok(summary) => {
            eprintln!(
                "materialized CIR: mirror={} objects={} linked={} copied={} keep_db={}",
                mirror_root.display(),
                summary.object_count,
                summary.linked_files,
                summary.copied_files,
                keep_db
            );
            Ok(())
        }
        Err(err) => {
            // Best-effort cleanup of a partial mirror unless --keep-db was given.
            if !keep_db && mirror_root.exists() {
                let _ = std::fs::remove_dir_all(&mirror_root);
            }
            Err(err.to_string())
        }
    }
}

View File

@ -0,0 +1,108 @@
use rocksdb::{DB, IteratorMode, Options};
use rpki::storage::{column_family_descriptors, CF_REPOSITORY_VIEW};
use std::fs;
use std::path::{Path, PathBuf};
/// Multi-line usage text for `repository_view_stats`.
fn usage() -> String {
    // Binary name is hard-coded so the help text is stable regardless of argv[0].
    let bin = "repository_view_stats";
    format!(
        "\
Usage:
{bin} --db <path>
Options:
--db <path> RocksDB directory
--help Show this help
"
    )
}

/// Parse argv into the RocksDB path.
///
/// `--help`/`-h` anywhere wins and returns the usage text as the error;
/// `--db` is required; anything else is rejected.
fn parse_args(argv: &[String]) -> Result<PathBuf, String> {
    if argv.iter().any(|arg| matches!(arg.as_str(), "--help" | "-h")) {
        return Err(usage());
    }
    let mut db_path: Option<PathBuf> = None;
    let mut args = argv.iter().skip(1);
    while let Some(arg) = args.next() {
        match arg.as_str() {
            "--db" => {
                let value = args.next().ok_or("--db requires a value")?;
                db_path = Some(PathBuf::from(value));
            }
            other => return Err(format!("unknown argument: {other}\n\n{}", usage())),
        }
    }
    db_path.ok_or_else(|| format!("--db is required\n\n{}", usage()))
}
/// Recursively sum the byte sizes of all regular files under `path`.
///
/// Directory entries that are neither files nor directories (e.g. symlinks)
/// are skipped. Additions saturate instead of overflowing.
fn dir_size(path: &Path) -> Result<u64, Box<dyn std::error::Error>> {
    let mut total: u64 = 0;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let kind = entry.file_type()?;
        if kind.is_dir() {
            total = total.saturating_add(dir_size(&entry.path())?);
        } else if kind.is_file() {
            total = total.saturating_add(entry.metadata()?.len());
        }
    }
    Ok(total)
}
/// Print size statistics for the `repository_view` column family as JSON.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let argv: Vec<String> = std::env::args().collect();
    let db_path = parse_args(&argv).map_err(|e| -> Box<dyn std::error::Error> { e.into() })?;

    // Open the existing database only — never create anything on disk.
    let mut opts = Options::default();
    opts.create_if_missing(false);
    opts.create_missing_column_families(false);
    let db = DB::open_cf_descriptors(&opts, &db_path, column_family_descriptors())?;
    let cf = db
        .cf_handle(CF_REPOSITORY_VIEW)
        .ok_or("missing repository_view column family")?;

    // Single pass over the column family accumulating count/size statistics.
    let (mut kv_count, mut key_bytes_total, mut value_bytes_total) = (0u64, 0u64, 0u64);
    let (mut max_key_bytes, mut max_value_bytes) = (0usize, 0usize);
    for entry in db.iterator_cf(cf, IteratorMode::Start) {
        let (key, value) = entry?;
        kv_count += 1;
        key_bytes_total += key.len() as u64;
        value_bytes_total += value.len() as u64;
        max_key_bytes = max_key_bytes.max(key.len());
        max_value_bytes = max_value_bytes.max(value.len());
    }
    let logical_total_bytes = key_bytes_total + value_bytes_total;

    // Averages report 0.0 for an empty column family (avoid dividing by zero).
    let (avg_key_bytes, avg_value_bytes) = if kv_count > 0 {
        (
            key_bytes_total as f64 / kv_count as f64,
            value_bytes_total as f64 / kv_count as f64,
        )
    } else {
        (0.0, 0.0)
    };

    let out = serde_json::json!({
        "db_path": db_path.display().to_string(),
        "column_family": CF_REPOSITORY_VIEW,
        "kv_count": kv_count,
        "key_bytes_total": key_bytes_total,
        "value_bytes_total": value_bytes_total,
        "logical_total_bytes": logical_total_bytes,
        "db_dir_on_disk_bytes": dir_size(&db_path)?,
        "avg_key_bytes": avg_key_bytes,
        "avg_value_bytes": avg_value_bytes,
        "max_key_bytes": max_key_bytes,
        "max_value_bytes": max_value_bytes,
    });
    println!("{}", serde_json::to_string_pretty(&out)?);
    Ok(())
}

View File

@ -1,20 +1,26 @@
pub mod build;
pub mod decode;
pub mod encode;
pub mod export;
pub mod verify;
pub mod dump;
pub mod hash;
pub mod model;
#[cfg(feature = "full")]
pub mod build;
#[cfg(feature = "full")]
pub mod export;
#[cfg(feature = "full")]
pub mod verify;
#[cfg(feature = "full")]
pub use build::{
CcrBuildError, build_aspa_payload_state, build_manifest_state_from_vcirs,
build_roa_payload_state, build_trust_anchor_state,
};
pub use decode::{CcrDecodeError, decode_content_info};
pub use encode::{CcrEncodeError, encode_content_info};
#[cfg(feature = "full")]
pub use export::{CcrExportError, build_ccr_from_run, write_ccr_file};
pub use dump::{CcrDumpError, dump_content_info_json, dump_content_info_json_value};
#[cfg(feature = "full")]
pub use verify::{CcrVerifyError, CcrVerifySummary, extract_vrp_rows, verify_against_report_json_path, verify_against_vcir_store, verify_against_vcir_store_path, verify_content_info, verify_content_info_bytes};
pub use hash::{compute_state_hash, verify_state_hash};
pub use model::{

161
src/cir/decode.rs Normal file
View File

@ -0,0 +1,161 @@
use crate::cir::model::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
};
use crate::data_model::common::DerReader;
use crate::data_model::oid::{OID_SHA256, OID_SHA256_RAW};
use der_parser::der::parse_der_oid;
/// Errors produced while decoding a DER-encoded CIR blob.
#[derive(Debug, thiserror::Error)]
pub enum CirDecodeError {
    /// Low-level DER structure error (bad tag, truncated value, trailing bytes, ...).
    #[error("DER parse error: {0}")]
    Parse(String),
    /// The version INTEGER did not match the supported CIR version.
    #[error("unexpected CIR version: expected {expected}, got {actual}")]
    UnexpectedVersion { expected: u32, actual: u32 },
    /// The digest algorithm OID was not the expected SHA-256 OID.
    #[error("unexpected digest algorithm OID: expected {expected}, got {actual}")]
    UnexpectedDigestAlgorithm { expected: &'static str, actual: String },
    /// Structure decoded fine but the resulting model failed its own validation.
    #[error("CIR model validation failed after decode: {0}")]
    Validate(String),
}
/// Decode a complete CIR from DER bytes and validate the resulting model.
///
/// Expected layout: SEQUENCE { version INTEGER, hashAlg OID,
/// validationTime GeneralizedTime, objects SEQUENCE OF CirObject,
/// tals SEQUENCE OF CirTal } with no trailing data at any level.
pub fn decode_cir(der: &[u8]) -> Result<CanonicalInputRepresentation, CirDecodeError> {
    let mut outer = DerReader::new(der);
    let mut fields = outer.take_sequence().map_err(CirDecodeError::Parse)?;
    if !outer.is_empty() {
        return Err(CirDecodeError::Parse("trailing bytes after CIR".into()));
    }

    // Version must match exactly; there is no compatibility path.
    let version = fields.take_uint_u64().map_err(CirDecodeError::Parse)? as u32;
    if version != CIR_VERSION_V1 {
        return Err(CirDecodeError::UnexpectedVersion {
            expected: CIR_VERSION_V1,
            actual: version,
        });
    }

    let hash_alg = decode_hash_alg(fields.take_tag(0x06).map_err(CirDecodeError::Parse)?)?;
    let validation_time =
        parse_generalized_time(fields.take_tag(0x18).map_err(CirDecodeError::Parse)?)?;

    // objects: SEQUENCE OF CirObject, each consumed as a full TLV.
    let mut objects = Vec::new();
    let mut object_items =
        DerReader::new(fields.take_tag(0x30).map_err(CirDecodeError::Parse)?);
    while !object_items.is_empty() {
        let (_tag, full, _value) =
            object_items.take_any_full().map_err(CirDecodeError::Parse)?;
        objects.push(decode_object(full)?);
    }

    // tals: SEQUENCE OF CirTal.
    let mut tals = Vec::new();
    let mut tal_items = DerReader::new(fields.take_tag(0x30).map_err(CirDecodeError::Parse)?);
    while !tal_items.is_empty() {
        let (_tag, full, _value) = tal_items.take_any_full().map_err(CirDecodeError::Parse)?;
        tals.push(decode_tal(full)?);
    }

    if !fields.is_empty() {
        return Err(CirDecodeError::Parse("trailing fields in CIR".into()));
    }

    let cir = CanonicalInputRepresentation {
        version,
        hash_alg,
        validation_time,
        objects,
        tals,
    };
    cir.validate().map_err(CirDecodeError::Validate)?;
    Ok(cir)
}
/// Map the digest-algorithm OID body to a `CirHashAlgorithm`; only SHA-256 is accepted.
fn decode_hash_alg(raw_body: &[u8]) -> Result<CirHashAlgorithm, CirDecodeError> {
    if raw_body == OID_SHA256_RAW {
        Ok(CirHashAlgorithm::Sha256)
    } else {
        Err(CirDecodeError::UnexpectedDigestAlgorithm {
            expected: OID_SHA256,
            actual: oid_string(raw_body)?,
        })
    }
}
/// Decode one CirObject: SEQUENCE { rsyncUri IA5String, sha256 OCTET STRING }.
fn decode_object(der: &[u8]) -> Result<CirObject, CirDecodeError> {
    let mut outer = DerReader::new(der);
    let mut fields = outer.take_sequence().map_err(CirDecodeError::Parse)?;
    if !outer.is_empty() {
        return Err(CirDecodeError::Parse("trailing bytes after CirObject".into()));
    }
    let uri_bytes = fields.take_tag(0x16).map_err(CirDecodeError::Parse)?;
    let rsync_uri = std::str::from_utf8(uri_bytes)
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?
        .to_string();
    let sha256 = fields
        .take_octet_string()
        .map_err(CirDecodeError::Parse)?
        .to_vec();
    if !fields.is_empty() {
        return Err(CirDecodeError::Parse("trailing fields in CirObject".into()));
    }
    Ok(CirObject { rsync_uri, sha256 })
}
/// Decode one CirTal: SEQUENCE { talUri IA5String, talBytes OCTET STRING }.
fn decode_tal(der: &[u8]) -> Result<CirTal, CirDecodeError> {
    let mut outer = DerReader::new(der);
    let mut fields = outer.take_sequence().map_err(CirDecodeError::Parse)?;
    if !outer.is_empty() {
        return Err(CirDecodeError::Parse("trailing bytes after CirTal".into()));
    }
    let uri_bytes = fields.take_tag(0x16).map_err(CirDecodeError::Parse)?;
    let tal_uri = std::str::from_utf8(uri_bytes)
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?
        .to_string();
    let tal_bytes = fields
        .take_octet_string()
        .map_err(CirDecodeError::Parse)?
        .to_vec();
    if !fields.is_empty() {
        return Err(CirDecodeError::Parse("trailing fields in CirTal".into()));
    }
    Ok(CirTal { tal_uri, tal_bytes })
}
/// Render a raw OID body as dotted-decimal text (used only in error messages).
fn oid_string(raw_body: &[u8]) -> Result<String, CirDecodeError> {
    // Re-wrap the body in a full TLV so der_parser can decode it; only
    // short-form lengths (< 0x80) are supported here.
    if raw_body.len() >= 0x80 {
        return Err(CirDecodeError::Parse("OID too long".into()));
    }
    let mut der = Vec::with_capacity(raw_body.len() + 2);
    der.push(0x06);
    der.push(raw_body.len() as u8);
    der.extend_from_slice(raw_body);
    let (_rem, oid) = parse_der_oid(&der).map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    let oid = oid
        .as_oid_val()
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    Ok(oid.to_string())
}
/// Parse a DER GeneralizedTime of the strict form `YYYYMMDDHHMMSSZ` into UTC.
///
/// # Errors
/// Returns `CirDecodeError::Parse` for non-UTF-8 input, wrong length/suffix,
/// non-digit characters, or out-of-range date/time components.
fn parse_generalized_time(bytes: &[u8]) -> Result<time::OffsetDateTime, CirDecodeError> {
    let s = std::str::from_utf8(bytes).map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    if s.len() != 15 || !s.ends_with('Z') {
        return Err(CirDecodeError::Parse(
            "GeneralizedTime must be YYYYMMDDHHMMSSZ".into(),
        ));
    }
    // Reject signs/whitespace/multibyte input up front: `u32::parse` would
    // accept e.g. "+123", and slicing a non-ASCII string at fixed byte
    // offsets below could panic on a char boundary.
    if !s.as_bytes()[..14].iter().all(u8::is_ascii_digit) {
        return Err(CirDecodeError::Parse(
            "GeneralizedTime must be YYYYMMDDHHMMSSZ".into(),
        ));
    }
    let parse = |range: std::ops::Range<usize>| -> Result<u32, CirDecodeError> {
        s[range]
            .parse::<u32>()
            .map_err(|e| CirDecodeError::Parse(e.to_string()))
    };
    let year = parse(0..4)? as i32;
    let month = parse(4..6)? as u8;
    let day = parse(6..8)? as u8;
    let hour = parse(8..10)? as u8;
    let minute = parse(10..12)? as u8;
    let second = parse(12..14)? as u8;
    // The `time` crate validates component ranges (month 1-12, day vs month, ...).
    let month = time::Month::try_from(month)
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    let date = time::Date::from_calendar_date(year, month, day)
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    let timev = time::Time::from_hms(hour, minute, second)
        .map_err(|e| CirDecodeError::Parse(e.to_string()))?;
    Ok(time::PrimitiveDateTime::new(date, timev).assume_utc())
}

147
src/cir/encode.rs Normal file
View File

@ -0,0 +1,147 @@
use crate::cir::model::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
};
use crate::data_model::oid::OID_SHA256_RAW;
/// Errors produced while encoding a CIR to DER.
#[derive(Debug, thiserror::Error)]
pub enum CirEncodeError {
    /// The in-memory model failed validation before encoding.
    #[error("CIR model validation failed: {0}")]
    Validate(String),
}
/// Encode a validated CIR as a DER SEQUENCE mirroring `decode_cir`'s layout.
pub fn encode_cir(
    cir: &CanonicalInputRepresentation,
) -> Result<Vec<u8>, CirEncodeError> {
    cir.validate().map_err(CirEncodeError::Validate)?;
    // Each object/TAL validates itself during encoding; first error wins.
    let objects = cir
        .objects
        .iter()
        .map(encode_object)
        .collect::<Result<Vec<_>, _>>()?;
    let tals = cir
        .tals
        .iter()
        .map(encode_tal)
        .collect::<Result<Vec<_>, _>>()?;
    let hash_alg_oid = match cir.hash_alg {
        CirHashAlgorithm::Sha256 => OID_SHA256_RAW,
    };
    Ok(encode_sequence(&[
        encode_integer_u32(cir.version),
        encode_oid(hash_alg_oid),
        encode_generalized_time(cir.validation_time),
        encode_sequence(&objects),
        encode_sequence(&tals),
    ]))
}
/// Encode one CirObject as SEQUENCE { rsyncUri IA5String, sha256 OCTET STRING }.
fn encode_object(object: &CirObject) -> Result<Vec<u8>, CirEncodeError> {
    object.validate().map_err(CirEncodeError::Validate)?;
    let uri = encode_ia5_string(object.rsync_uri.as_bytes());
    let digest = encode_octet_string(&object.sha256);
    Ok(encode_sequence(&[uri, digest]))
}

/// Encode one CirTal as SEQUENCE { talUri IA5String, talBytes OCTET STRING }.
fn encode_tal(tal: &CirTal) -> Result<Vec<u8>, CirEncodeError> {
    tal.validate().map_err(CirEncodeError::Validate)?;
    let uri = encode_ia5_string(tal.tal_uri.as_bytes());
    let body = encode_octet_string(&tal.tal_bytes);
    Ok(encode_sequence(&[uri, body]))
}
/// Encode a timestamp as a DER GeneralizedTime (`YYYYMMDDHHMMSSZ`, always UTC).
fn encode_generalized_time(t: time::OffsetDateTime) -> Vec<u8> {
    let utc = t.to_offset(time::UtcOffset::UTC);
    let rendered = format!(
        "{:04}{:02}{:02}{:02}{:02}{:02}Z",
        utc.year(),
        u8::from(utc.month()),
        utc.day(),
        utc.hour(),
        utc.minute(),
        utc.second()
    );
    encode_tlv(0x18, rendered.into_bytes())
}
/// DER INTEGER from a u32 (minimal two's-complement encoding).
fn encode_integer_u32(v: u32) -> Vec<u8> {
    encode_integer_bytes(unsigned_integer_bytes(u64::from(v)))
}

/// Wrap big-endian magnitude bytes as a DER INTEGER, prepending a zero byte
/// when the high bit is set so the value stays non-negative.
fn encode_integer_bytes(mut bytes: Vec<u8>) -> Vec<u8> {
    if bytes.is_empty() {
        bytes = vec![0];
    } else if bytes[0] >= 0x80 {
        bytes.insert(0, 0);
    }
    encode_tlv(0x02, bytes)
}

/// Minimal big-endian byte representation of `v`; zero encodes as one 0x00 byte.
fn unsigned_integer_bytes(v: u64) -> Vec<u8> {
    let be = v.to_be_bytes();
    let first = be.iter().position(|&b| b != 0).unwrap_or(be.len() - 1);
    be[first..].to_vec()
}

/// OBJECT IDENTIFIER TLV from a pre-encoded OID body.
fn encode_oid(raw_body: &[u8]) -> Vec<u8> {
    encode_tlv(0x06, raw_body.to_vec())
}

/// IA5String TLV.
fn encode_ia5_string(bytes: &[u8]) -> Vec<u8> {
    encode_tlv(0x16, bytes.to_vec())
}

/// OCTET STRING TLV.
fn encode_octet_string(bytes: &[u8]) -> Vec<u8> {
    encode_tlv(0x04, bytes.to_vec())
}

/// SEQUENCE TLV over the concatenation of already-encoded elements.
fn encode_sequence(elements: &[Vec<u8>]) -> Vec<u8> {
    let body: Vec<u8> = elements.iter().flat_map(|e| e.iter().copied()).collect();
    encode_tlv(0x30, body)
}

/// Generic tag-length-value assembly with DER definite-length encoding.
fn encode_tlv(tag: u8, value: Vec<u8>) -> Vec<u8> {
    let mut out = Vec::with_capacity(1 + encoded_len_len(value.len()) + value.len());
    out.push(tag);
    encode_len_into(value.len(), &mut out);
    out.extend_from_slice(&value);
    out
}

/// Number of bytes the DER length field itself occupies for a body of `len`.
fn encoded_len_len(len: usize) -> usize {
    if len < 0x80 {
        1
    } else {
        // Long form: one prefix byte plus the significant big-endian bytes of `len`.
        1 + (std::mem::size_of::<usize>() - len.leading_zeros() as usize / 8)
    }
}

/// Append the DER definite-length encoding of `len` to `out`.
fn encode_len_into(len: usize, out: &mut Vec<u8>) {
    if len < 0x80 {
        // Short form: the length fits in a single byte.
        out.push(len as u8);
    } else {
        // Long form: 0x80 | byte-count, then the significant big-endian bytes.
        let significant: Vec<u8> = len
            .to_be_bytes()
            .iter()
            .copied()
            .skip_while(|&b| b == 0)
            .collect();
        out.push(0x80 | significant.len() as u8);
        out.extend_from_slice(&significant);
    }
}
// Touch CIR_VERSION_V1 so the model import stays referenced from this module
// even if no other code path in this file uses it.
#[allow(dead_code)]
const _: () = {
    let _ = CIR_VERSION_V1;
};

285
src/cir/export.rs Normal file
View File

@ -0,0 +1,285 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::Path;
use crate::cir::encode::{CirEncodeError, encode_cir};
use crate::cir::model::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
};
use crate::cir::static_pool::{
CirStaticPoolError, CirStaticPoolExportSummary, write_bytes_to_static_pool,
export_hashes_from_store,
};
use crate::data_model::ta::TrustAnchor;
use crate::storage::{RepositoryViewState, RocksStore};
/// Errors produced while exporting a CIR (and its static object pool) from a run.
#[derive(Debug, thiserror::Error)]
pub enum CirExportError {
    /// Enumerating rsync:// entries in the repository view failed.
    #[error("list repository_view entries failed: {0}")]
    ListRepositoryView(String),
    /// The TAL URI recorded in the CIR must be http(s).
    #[error("CIR TAL URI must be http(s), got: {0}")]
    InvalidTalUri(String),
    /// Replay scheme A needs at least one rsync URI for the TA certificate.
    #[error("TAL does not contain any rsync TA URI; CIR replay scheme A requires one")]
    MissingTaRsyncUri,
    /// The assembled CIR failed model validation.
    #[error("CIR model validation failed: {0}")]
    Validate(String),
    /// DER encoding of the CIR failed.
    #[error("encode CIR failed: {0}")]
    Encode(#[from] CirEncodeError),
    /// Writing objects into the static pool failed.
    #[error("static pool export failed: {0}")]
    StaticPool(#[from] CirStaticPoolError),
    /// Writing the .cir file (or creating its parent directory) failed.
    #[error("write CIR file failed: {0}: {1}")]
    Write(String, String),
}

/// Counts reported after a successful CIR export.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirExportSummary {
    /// Number of CirObject entries (repository objects plus TA certificate URIs).
    pub object_count: usize,
    /// Number of embedded TALs.
    pub tal_count: usize,
    /// Static pool write/reuse statistics.
    pub static_pool: CirStaticPoolExportSummary,
}
/// Assemble a CIR from the current repository-view state plus the trust anchor.
///
/// Collects every rsync:// object that is Present/Replaced with a known hash,
/// registers the TA certificate under each of its rsync URIs, and embeds the
/// raw TAL bytes under `tal_uri` (which must be http/https).
pub fn build_cir_from_run(
    store: &RocksStore,
    trust_anchor: &TrustAnchor,
    tal_uri: &str,
    validation_time: time::OffsetDateTime,
) -> Result<CanonicalInputRepresentation, CirExportError> {
    let is_http = tal_uri.starts_with("https://") || tal_uri.starts_with("http://");
    if !is_http {
        return Err(CirExportError::InvalidTalUri(tal_uri.to_string()));
    }

    // BTreeMap keeps the object list deterministically sorted by rsync URI.
    let mut objects: BTreeMap<String, String> = BTreeMap::new();
    let entries = store
        .list_repository_view_entries_with_prefix("rsync://")
        .map_err(|e| CirExportError::ListRepositoryView(e.to_string()))?;
    for entry in entries {
        let relevant = matches!(
            entry.state,
            RepositoryViewState::Present | RepositoryViewState::Replaced
        );
        if relevant {
            if let Some(hash) = entry.current_hash {
                objects.insert(entry.rsync_uri, hash.to_ascii_lowercase());
            }
        }
    }

    // The TA certificate itself is addressed via the TAL's rsync URI(s);
    // at least one must exist for the rsync-only replay to bootstrap.
    let ta_hash = ta_sha256_hex(&trust_anchor.ta_certificate.raw_der);
    let mut saw_rsync_uri = false;
    for uri in &trust_anchor.tal.ta_uris {
        if uri.scheme() == "rsync" {
            saw_rsync_uri = true;
            objects.insert(uri.as_str().to_string(), ta_hash.clone());
        }
    }
    if !saw_rsync_uri {
        return Err(CirExportError::MissingTaRsyncUri);
    }

    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time: validation_time.to_offset(time::UtcOffset::UTC),
        objects: objects
            .into_iter()
            .map(|(rsync_uri, sha256_hex)| CirObject {
                rsync_uri,
                sha256: hex::decode(sha256_hex).expect("validated hex"),
            })
            .collect(),
        tals: vec![CirTal {
            tal_uri: tal_uri.to_string(),
            tal_bytes: trust_anchor.tal.raw.clone(),
        }],
    };
    cir.validate().map_err(CirExportError::Validate)?;
    Ok(cir)
}
pub fn write_cir_file(path: &Path, cir: &CanonicalInputRepresentation) -> Result<(), CirExportError> {
let der = encode_cir(cir)?;
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.map_err(|e| CirExportError::Write(path.display().to_string(), e.to_string()))?;
}
std::fs::write(path, der)
.map_err(|e| CirExportError::Write(path.display().to_string(), e.to_string()))
}
/// Export all object bytes referenced by `cir` into the dated static pool.
///
/// The TA certificate is written from the trust anchor itself (its bytes are
/// not in the store); everything else is exported straight from the store.
pub fn export_cir_static_pool(
    store: &RocksStore,
    static_root: &Path,
    capture_date_utc: time::Date,
    cir: &CanonicalInputRepresentation,
    trust_anchor: &TrustAnchor,
) -> Result<CirStaticPoolExportSummary, CirExportError> {
    let ta_hash = ta_sha256_hex(&trust_anchor.ta_certificate.raw_der);

    // All non-TA hashes come out of the store.
    let hashes: Vec<String> = cir
        .objects
        .iter()
        .map(|item| hex::encode(&item.sha256))
        .filter(|hash| hash != &ta_hash)
        .collect();
    let mut summary = export_hashes_from_store(store, static_root, capture_date_utc, &hashes)?;

    // The TA certificate bytes are supplied directly.
    let ta_result = write_bytes_to_static_pool(
        static_root,
        capture_date_utc,
        &ta_hash,
        &trust_anchor.ta_certificate.raw_der,
    )?;

    // Recompute the unique-hash count so it includes the TA hash, and fold
    // the TA write into the written/reused counters.
    let mut unique: BTreeSet<String> = hashes.iter().cloned().collect();
    unique.insert(ta_hash.clone());
    summary.unique_hashes = unique.len();
    if ta_result.written {
        summary.written_files += 1;
    } else {
        summary.reused_files += 1;
    }
    Ok(summary)
}
/// Build the CIR, export its static pool, then write the .cir file.
pub fn export_cir_from_run(
    store: &RocksStore,
    trust_anchor: &TrustAnchor,
    tal_uri: &str,
    validation_time: time::OffsetDateTime,
    cir_out: &Path,
    static_root: &Path,
    capture_date_utc: time::Date,
) -> Result<CirExportSummary, CirExportError> {
    let cir = build_cir_from_run(store, trust_anchor, tal_uri, validation_time)?;
    let static_pool =
        export_cir_static_pool(store, static_root, capture_date_utc, &cir, trust_anchor)?;
    write_cir_file(cir_out, &cir)?;
    Ok(CirExportSummary {
        object_count: cir.objects.len(),
        tal_count: cir.tals.len(),
        static_pool,
    })
}
/// Lowercase hex SHA-256 digest of the given bytes (TA certificate DER).
fn ta_sha256_hex(bytes: &[u8]) -> String {
    use sha2::{Digest, Sha256};
    let digest = Sha256::digest(bytes);
    hex::encode(digest)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::cir::decode::decode_cir;
    use crate::cir::static_pool_path;
    use crate::data_model::ta::TrustAnchor;
    use crate::data_model::tal::Tal;
    use crate::storage::{RawByHashEntry, RepositoryViewEntry, RepositoryViewState, RocksStore};

    // Fixed timestamp used as the CIR validation time in these tests.
    fn sample_time() -> time::OffsetDateTime {
        time::OffsetDateTime::parse(
            "2026-04-07T12:34:56Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap()
    }

    // Capture date matching sample_time()'s calendar day.
    fn sample_date() -> time::Date {
        time::Date::from_calendar_date(2026, time::Month::April, 7).unwrap()
    }

    // Loads the APNIC TAL + TA certificate fixtures and binds them together.
    fn sample_trust_anchor() -> TrustAnchor {
        let base = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        let tal_bytes = std::fs::read(base.join("tests/fixtures/tal/apnic-rfc7730-https.tal")).unwrap();
        let ta_der = std::fs::read(base.join("tests/fixtures/ta/apnic-ta.cer")).unwrap();
        let tal = Tal::decode_bytes(&tal_bytes).unwrap();
        TrustAnchor::bind_der(tal, &ta_der, None).unwrap()
    }

    // Lowercase hex SHA-256, matching the export code's hashing.
    fn sha256_hex(bytes: &[u8]) -> String {
        use sha2::{Digest, Sha256};
        hex::encode(Sha256::digest(bytes))
    }

    #[test]
    fn build_cir_from_run_collects_repository_view_and_tal() {
        let td = tempfile::tempdir().unwrap();
        let store = RocksStore::open(td.path()).unwrap();
        // Seed one Present rsync object into the repository view.
        let bytes = b"object-a".to_vec();
        let hash = sha256_hex(&bytes);
        let mut raw = RawByHashEntry::from_bytes(hash.clone(), bytes.clone());
        raw.origin_uris.push("rsync://example.test/repo/a.cer".into());
        store.put_raw_by_hash_entry(&raw).unwrap();
        store
            .put_repository_view_entry(&RepositoryViewEntry {
                rsync_uri: "rsync://example.test/repo/a.cer".to_string(),
                current_hash: Some(hash),
                repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
                object_type: Some("cer".to_string()),
                state: RepositoryViewState::Present,
            })
            .unwrap();
        let ta = sample_trust_anchor();
        let cir = build_cir_from_run(&store, &ta, "https://example.test/root.tal", sample_time())
            .expect("build cir");
        assert_eq!(cir.version, CIR_VERSION_V1);
        assert_eq!(cir.tals.len(), 1);
        assert_eq!(cir.tals[0].tal_uri, "https://example.test/root.tal");
        // Both the seeded object and the TA certificate URI must be listed.
        assert!(cir
            .objects
            .iter()
            .any(|item| item.rsync_uri == "rsync://example.test/repo/a.cer"));
        assert!(cir
            .objects
            .iter()
            .any(|item| item.rsync_uri.contains("apnic-rpki-root-iana-origin.cer")));
    }

    #[test]
    fn export_cir_from_run_writes_der_and_static_pool() {
        let td = tempfile::tempdir().unwrap();
        let store_dir = td.path().join("db");
        let out_dir = td.path().join("out");
        let static_root = td.path().join("static");
        let store = RocksStore::open(&store_dir).unwrap();
        let bytes = b"object-b".to_vec();
        let hash = sha256_hex(&bytes);
        let mut raw = RawByHashEntry::from_bytes(hash.clone(), bytes.clone());
        raw.origin_uris.push("rsync://example.test/repo/b.roa".into());
        store.put_raw_by_hash_entry(&raw).unwrap();
        store
            .put_repository_view_entry(&RepositoryViewEntry {
                rsync_uri: "rsync://example.test/repo/b.roa".to_string(),
                current_hash: Some(hash.clone()),
                repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
                object_type: Some("roa".to_string()),
                state: RepositoryViewState::Present,
            })
            .unwrap();
        let ta = sample_trust_anchor();
        let cir_path = out_dir.join("example.cir");
        let summary = export_cir_from_run(
            &store,
            &ta,
            "https://example.test/root.tal",
            sample_time(),
            &cir_path,
            &static_root,
            sample_date(),
        )
        .expect("export cir");
        assert_eq!(summary.tal_count, 1);
        // >= 2: the seeded object plus at least one TA certificate URI.
        assert!(summary.object_count >= 2);
        // The written file must round-trip through the decoder.
        let der = std::fs::read(&cir_path).unwrap();
        let cir = decode_cir(&der).unwrap();
        assert_eq!(cir.tals[0].tal_uri, "https://example.test/root.tal");
        // The object bytes must be retrievable from the dated static pool.
        let object_path = static_pool_path(&static_root, sample_date(), &hash).unwrap();
        assert_eq!(std::fs::read(object_path).unwrap(), bytes);
    }
}

388
src/cir/materialize.rs Normal file
View File

@ -0,0 +1,388 @@
use std::fs;
use std::path::{Path, PathBuf};
use crate::cir::model::CanonicalInputRepresentation;
/// Errors produced while materializing a CIR into an rsync-style mirror tree.
#[derive(Debug, thiserror::Error)]
pub enum CirMaterializeError {
    /// The URI failed to parse or is not an rsync:// URL with a host.
    #[error("invalid rsync URI: {0}")]
    InvalidRsyncUri(String),
    /// The URI's last path segment is empty, i.e. it names a directory.
    #[error("rsync URI must reference a file object, got directory-like URI: {0}")]
    DirectoryLikeRsyncUri(String),
    #[error("create mirror root failed: {path}: {detail}")]
    CreateMirrorRoot { path: String, detail: String },
    #[error("remove mirror root failed: {path}: {detail}")]
    RemoveMirrorRoot { path: String, detail: String },
    #[error("create parent directory failed: {path}: {detail}")]
    CreateParent { path: String, detail: String },
    #[error("remove existing target failed: {path}: {detail}")]
    RemoveExistingTarget { path: String, detail: String },
    /// No dated static-pool directory contains the requested hash (or the hash
    /// string itself is malformed).
    #[error("static object not found for sha256={sha256_hex}")]
    MissingStaticObject { sha256_hex: String },
    /// A hard-link attempt failed. NOTE(review): materialize_cir falls back to
    /// a copy on link failure, so this variant appears unused in this file.
    #[error("link target failed: {src} -> {dst}: {detail}")]
    Link { src: String, dst: String, detail: String },
    /// The copy fallback failed too; detail includes the original link error.
    #[error("copy target failed: {src} -> {dst}: {detail}")]
    Copy { src: String, dst: String, detail: String },
    /// Post-materialize verification found a different file set than the CIR lists.
    #[error("mirror tree mismatch after materialize: {0}")]
    TreeMismatch(String),
}

/// Counts reported after a successful materialization.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirMaterializeSummary {
    /// Total objects in the CIR (equals files placed in the mirror).
    pub object_count: usize,
    /// Files placed via hard link.
    pub linked_files: usize,
    /// Files placed via the copy fallback.
    pub copied_files: usize,
}
/// Materialize every CIR object from the static pool into an rsync-style
/// mirror tree (`<mirror_root>/<host>/<path>`), hard-linking when possible
/// and falling back to a byte copy, then verify the resulting tree matches
/// the CIR's object list exactly.
pub fn materialize_cir(
    cir: &CanonicalInputRepresentation,
    static_root: &Path,
    mirror_root: &Path,
    clean_rebuild: bool,
) -> Result<CirMaterializeSummary, CirMaterializeError> {
    cir.validate()
        .map_err(CirMaterializeError::TreeMismatch)?;

    // Optionally wipe any previous mirror so stale files cannot survive.
    if clean_rebuild && mirror_root.exists() {
        fs::remove_dir_all(mirror_root).map_err(|e| CirMaterializeError::RemoveMirrorRoot {
            path: mirror_root.display().to_string(),
            detail: e.to_string(),
        })?;
    }
    fs::create_dir_all(mirror_root).map_err(|e| CirMaterializeError::CreateMirrorRoot {
        path: mirror_root.display().to_string(),
        detail: e.to_string(),
    })?;

    let mut linked_files = 0usize;
    let mut copied_files = 0usize;
    for object in &cir.objects {
        let sha256_hex = hex::encode(&object.sha256);
        let source = resolve_static_pool_file(static_root, &sha256_hex)?;
        let target = mirror_root.join(mirror_relative_path_for_rsync_uri(&object.rsync_uri)?);
        if let Some(parent) = target.parent() {
            fs::create_dir_all(parent).map_err(|e| CirMaterializeError::CreateParent {
                path: parent.display().to_string(),
                detail: e.to_string(),
            })?;
        }
        // hard_link refuses to overwrite, so clear any existing file first.
        if target.exists() {
            fs::remove_file(&target).map_err(|e| CirMaterializeError::RemoveExistingTarget {
                path: target.display().to_string(),
                detail: e.to_string(),
            })?;
        }
        // Prefer a hard link (cheap, shares storage with the pool); fall back
        // to a copy if linking fails, reporting both errors if the copy fails too.
        if let Err(link_err) = fs::hard_link(&source, &target) {
            fs::copy(&source, &target).map_err(|copy_err| CirMaterializeError::Copy {
                src: source.display().to_string(),
                dst: target.display().to_string(),
                detail: format!("{copy_err}; original link error: {link_err}"),
            })?;
            copied_files += 1;
        } else {
            linked_files += 1;
        }
    }

    // Post-condition: the files on disk must be exactly the CIR's URI set.
    let actual = collect_materialized_uris(mirror_root)?;
    let expected: std::collections::BTreeSet<String> = cir
        .objects
        .iter()
        .map(|item| item.rsync_uri.clone())
        .collect();
    if actual != expected {
        return Err(CirMaterializeError::TreeMismatch(format!(
            "expected {} files, got {} files",
            expected.len(),
            actual.len()
        )));
    }

    Ok(CirMaterializeSummary {
        object_count: cir.objects.len(),
        linked_files,
        copied_files,
    })
}
/// Map `rsync://host/a/b/c` to the relative mirror path `host/a/b/c`.
///
/// Rejects non-rsync schemes, URIs without a host, and directory-like URIs
/// (no segments or an empty last segment): every CIR object must be a file.
pub fn mirror_relative_path_for_rsync_uri(rsync_uri: &str) -> Result<PathBuf, CirMaterializeError> {
    let invalid = || CirMaterializeError::InvalidRsyncUri(rsync_uri.to_string());
    let url = url::Url::parse(rsync_uri).map_err(|_| invalid())?;
    if url.scheme() != "rsync" {
        return Err(invalid());
    }
    let host = url.host_str().ok_or_else(invalid)?;
    let segments: Vec<&str> = url.path_segments().ok_or_else(invalid)?.collect();
    let last_is_file = segments
        .last()
        .map(|segment| !segment.is_empty())
        .unwrap_or(false);
    if !last_is_file {
        return Err(CirMaterializeError::DirectoryLikeRsyncUri(
            rsync_uri.to_string(),
        ));
    }
    // Empty interior segments (from "//" in the path) are dropped.
    let mut path = PathBuf::from(host);
    for segment in segments.iter().filter(|segment| !segment.is_empty()) {
        path.push(segment);
    }
    Ok(path)
}
/// Locate the static-pool file for `sha256_hex`, searching every dated
/// subdirectory of `static_root` in ascending (sorted) order.
///
/// Pool layout: `<static_root>/<date>/<hex[0..2]>/<hex[2..4]>/<full-hash>`.
pub fn resolve_static_pool_file(
    static_root: &Path,
    sha256_hex: &str,
) -> Result<PathBuf, CirMaterializeError> {
    let missing = || CirMaterializeError::MissingStaticObject {
        sha256_hex: sha256_hex.to_string(),
    };
    // A malformed hash can never exist in the pool; report it as missing.
    let well_formed =
        sha256_hex.len() == 64 && sha256_hex.as_bytes().iter().all(u8::is_ascii_hexdigit);
    if !well_formed {
        return Err(missing());
    }
    let mut date_dirs: Vec<PathBuf> = fs::read_dir(static_root)
        .map_err(|_| missing())?
        .filter_map(Result::ok)
        .map(|entry| entry.path())
        .filter(|path| path.is_dir())
        .collect();
    date_dirs.sort();
    let relative = Path::new(&sha256_hex[0..2])
        .join(&sha256_hex[2..4])
        .join(sha256_hex);
    date_dirs
        .into_iter()
        .map(|date_dir| date_dir.join(&relative))
        .find(|candidate| candidate.is_file())
        .ok_or_else(missing)
}
/// Walk the mirror tree and rebuild the `rsync://` URI for every non-directory
/// entry, used as the post-materialize verification set.
///
/// I/O failures during the walk are reported via the CreateMirrorRoot variant.
fn collect_materialized_uris(
    mirror_root: &Path,
) -> Result<std::collections::BTreeSet<String>, CirMaterializeError> {
    let mut uris = std::collections::BTreeSet::new();
    let mut pending = vec![mirror_root.to_path_buf()];
    while let Some(dir) = pending.pop() {
        let read_err = |detail: String| CirMaterializeError::CreateMirrorRoot {
            path: dir.display().to_string(),
            detail,
        };
        for entry in fs::read_dir(&dir).map_err(|e| read_err(e.to_string()))? {
            let child = entry.map_err(|e| read_err(e.to_string()))?.path();
            if child.is_dir() {
                pending.push(child);
            } else {
                // Normalize separators so Windows paths produce valid URIs.
                let rel = child
                    .strip_prefix(mirror_root)
                    .expect("materialized path under mirror root")
                    .to_string_lossy()
                    .replace('\\', "/");
                uris.insert(format!("rsync://{rel}"));
            }
        }
    }
    Ok(uris)
}
#[cfg(test)]
mod tests {
    //! Tests for CIR materialization: URI-to-path mapping, static-pool
    //! lookup, and building an exact local mirror tree from a CIR.
    use super::{CirMaterializeError, materialize_cir, mirror_relative_path_for_rsync_uri, resolve_static_pool_file};
    use crate::cir::model::{
        CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
    };
    use std::path::{Path, PathBuf};

    // Fixed UTC timestamp shared by the fixtures.
    fn sample_time() -> time::OffsetDateTime {
        time::OffsetDateTime::parse(
            "2026-04-07T12:34:56Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap()
    }

    // Two-object CIR (one nested path) with a single TAL.
    fn sample_cir() -> CanonicalInputRepresentation {
        CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: vec![
                CirObject {
                    rsync_uri: "rsync://example.net/repo/a.cer".to_string(),
                    sha256: hex::decode(
                        "1111111111111111111111111111111111111111111111111111111111111111",
                    )
                    .unwrap(),
                },
                CirObject {
                    rsync_uri: "rsync://example.net/repo/nested/b.roa".to_string(),
                    sha256: hex::decode(
                        "2222222222222222222222222222222222222222222222222222222222222222",
                    )
                    .unwrap(),
                },
            ],
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        }
    }

    // rsync://host/a/b must map to <host>/a/b relative to the mirror root.
    #[test]
    fn mirror_relative_path_for_rsync_uri_maps_host_and_path() {
        let path =
            mirror_relative_path_for_rsync_uri("rsync://example.net/repo/nested/b.roa").unwrap();
        assert_eq!(path, PathBuf::from("example.net").join("repo").join("nested").join("b.roa"));
    }

    // The pool is scanned per date directory, so a hash stored under any
    // date must be found.
    #[test]
    fn resolve_static_pool_file_finds_hash_across_dates() {
        let td = tempfile::tempdir().unwrap();
        let path = td
            .path()
            .join("20260407")
            .join("11")
            .join("11");
        std::fs::create_dir_all(&path).unwrap();
        let file = path.join("1111111111111111111111111111111111111111111111111111111111111111");
        std::fs::write(&file, b"x").unwrap();
        let resolved = resolve_static_pool_file(
            td.path(),
            "1111111111111111111111111111111111111111111111111111111111111111",
        )
        .unwrap();
        assert_eq!(resolved, file);
    }

    // Both a malformed hash string and a well-formed-but-absent hash surface
    // as the same MissingStaticObject error.
    #[test]
    fn resolve_static_pool_file_rejects_invalid_hash_and_missing_hash() {
        let td = tempfile::tempdir().unwrap();
        let err = resolve_static_pool_file(td.path(), "not-a-hash")
            .expect_err("invalid hash should fail");
        assert!(matches!(err, CirMaterializeError::MissingStaticObject { .. }));
        let err = resolve_static_pool_file(
            td.path(),
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        )
        .expect_err("missing hash should fail");
        assert!(matches!(err, CirMaterializeError::MissingStaticObject { .. }));
    }

    // Non-rsync schemes and URIs ending in '/' (directory-like) are rejected
    // with distinct error variants.
    #[test]
    fn mirror_relative_path_rejects_non_rsync_and_directory_like_uris() {
        let err = mirror_relative_path_for_rsync_uri("https://example.net/repo/a.roa")
            .expect_err("non-rsync uri must fail");
        assert!(matches!(err, CirMaterializeError::InvalidRsyncUri(_)));
        let err = mirror_relative_path_for_rsync_uri("rsync://example.net/repo/")
            .expect_err("directory-like uri must fail");
        assert!(matches!(err, CirMaterializeError::DirectoryLikeRsyncUri(_)));
    }

    // With clean_rebuild=true the mirror must contain exactly the CIR's
    // objects; any pre-existing content is removed.
    #[test]
    fn materialize_clean_rebuild_creates_exact_tree_and_removes_stale_files() {
        let td = tempfile::tempdir().unwrap();
        let static_root = td.path().join("static");
        let mirror_root = td.path().join("mirror");
        write_static(
            &static_root,
            "20260407",
            "1111111111111111111111111111111111111111111111111111111111111111",
            b"a",
        );
        write_static(
            &static_root,
            "20260407",
            "2222222222222222222222222222222222222222222222222222222222222222",
            b"b",
        );
        std::fs::create_dir_all(mirror_root.join("stale")).unwrap();
        std::fs::write(mirror_root.join("stale/old.txt"), b"old").unwrap();
        let summary = materialize_cir(&sample_cir(), &static_root, &mirror_root, true).unwrap();
        assert_eq!(summary.object_count, 2);
        assert_eq!(std::fs::read(mirror_root.join("example.net/repo/a.cer")).unwrap(), b"a");
        assert_eq!(
            std::fs::read(mirror_root.join("example.net/repo/nested/b.roa")).unwrap(),
            b"b"
        );
        assert!(!mirror_root.join("stale/old.txt").exists());
    }

    // Materialization is all-or-nothing: a hash absent from the static pool
    // fails the whole run.
    #[test]
    fn materialize_fails_when_static_object_missing() {
        let td = tempfile::tempdir().unwrap();
        let err = materialize_cir(
            &sample_cir(),
            td.path(),
            &td.path().join("mirror"),
            true,
        )
        .expect_err("missing static object must fail");
        assert!(matches!(err, CirMaterializeError::MissingStaticObject { .. }));
    }

    // Without clean_rebuild, leftover files not named by the CIR must be
    // detected as a tree mismatch rather than silently kept.
    #[test]
    fn materialize_without_clean_rebuild_detects_stale_extra_files() {
        let td = tempfile::tempdir().unwrap();
        let static_root = td.path().join("static");
        let mirror_root = td.path().join("mirror");
        write_static(
            &static_root,
            "20260407",
            "1111111111111111111111111111111111111111111111111111111111111111",
            b"a",
        );
        write_static(
            &static_root,
            "20260407",
            "2222222222222222222222222222222222222222222222222222222222222222",
            b"b",
        );
        std::fs::create_dir_all(mirror_root.join("extra")).unwrap();
        std::fs::write(mirror_root.join("extra/stale.txt"), b"stale").unwrap();
        let err = materialize_cir(&sample_cir(), &static_root, &mirror_root, false)
            .expect_err("stale extra files should fail exact tree check");
        assert!(matches!(err, CirMaterializeError::TreeMismatch(_)));
    }

    // Create <root>/<date>/<hh>/<hh>/<hash> with the given bytes, mimicking
    // the static-pool sharded layout.
    fn write_static(root: &Path, date: &str, hash: &str, bytes: &[u8]) {
        let path = root.join(date).join(&hash[0..2]).join(&hash[2..4]);
        std::fs::create_dir_all(&path).unwrap();
        std::fs::write(path.join(hash), bytes).unwrap();
    }
}

335
src/cir/mod.rs Normal file
View File

@ -0,0 +1,335 @@
pub mod decode;
pub mod encode;
pub mod materialize;
pub mod model;
#[cfg(feature = "full")]
pub mod export;
#[cfg(feature = "full")]
pub mod static_pool;
pub use decode::{CirDecodeError, decode_cir};
pub use encode::{CirEncodeError, encode_cir};
pub use materialize::{
CirMaterializeError, CirMaterializeSummary, materialize_cir, mirror_relative_path_for_rsync_uri,
resolve_static_pool_file,
};
pub use model::{
CIR_VERSION_V1, CirHashAlgorithm, CirObject, CirTal, CanonicalInputRepresentation,
};
#[cfg(feature = "full")]
pub use export::{CirExportError, CirExportSummary, build_cir_from_run, export_cir_from_run, write_cir_file};
#[cfg(feature = "full")]
pub use static_pool::{
CirStaticPoolError, CirStaticPoolExportSummary, CirStaticPoolWriteResult,
export_hashes_from_store, static_pool_path, static_pool_relative_path,
write_bytes_to_static_pool, write_raw_entry_to_static_pool,
};
#[cfg(test)]
mod tests {
    //! Round-trip and negative tests for CIR DER encode/decode, including
    //! hand-built TLV fixtures and targeted byte tampering that probe the
    //! strictness of the decoder.
    use super::{
        CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
        decode_cir, encode_cir,
    };

    // Fixed UTC timestamp shared by the fixtures.
    fn sample_time() -> time::OffsetDateTime {
        time::OffsetDateTime::parse(
            "2026-04-07T12:34:56Z",
            &time::format_description::well_known::Rfc3339,
        )
        .expect("valid rfc3339")
    }

    // Representative CIR with two objects and one TAL.
    fn sample_cir() -> CanonicalInputRepresentation {
        CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: vec![
                CirObject {
                    rsync_uri: "rsync://example.net/repo/a.cer".to_string(),
                    sha256: vec![0x11; 32],
                },
                CirObject {
                    rsync_uri: "rsync://example.net/repo/b.roa".to_string(),
                    sha256: vec![0x22; 32],
                },
            ],
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes:
                    b"https://tal.example.net/ta.cer\nrsync://example.net/repo/ta.cer\nMIIB"
                        .to_vec(),
            }],
        }
    }

    // Minimal DER TLV writer used to hand-build malformed structures.
    // Short-form length below 0x80; otherwise long-form with the minimal
    // number of big-endian length bytes.
    fn test_encode_tlv(tag: u8, value: &[u8]) -> Vec<u8> {
        let mut out = Vec::with_capacity(8 + value.len());
        out.push(tag);
        if value.len() < 0x80 {
            out.push(value.len() as u8);
        } else {
            let len = value.len();
            let bytes = len.to_be_bytes();
            let first_non_zero = bytes.iter().position(|&b| b != 0).unwrap_or(bytes.len() - 1);
            let len_bytes = &bytes[first_non_zero..];
            out.push(0x80 | len_bytes.len() as u8);
            out.extend_from_slice(len_bytes);
        }
        out.extend_from_slice(value);
        out
    }

    // encode -> decode must reproduce the full sample exactly.
    #[test]
    fn cir_roundtrip_full_succeeds() {
        let cir = sample_cir();
        let der = encode_cir(&cir).expect("encode cir");
        let decoded = decode_cir(&der).expect("decode cir");
        assert_eq!(decoded, cir);
    }

    // Round-trip also holds for the smallest legal CIR: no objects, one TAL.
    #[test]
    fn cir_roundtrip_minimal_succeeds() {
        let cir = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: Vec::new(),
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/minimal.tal".to_string(),
                tal_bytes: b"rsync://example.net/repo/ta.cer\nMIIB".to_vec(),
            }],
        };
        let der = encode_cir(&cir).expect("encode minimal cir");
        let decoded = decode_cir(&der).expect("decode minimal cir");
        assert_eq!(decoded, cir);
    }

    // Objects listed out of rsyncUri order must be refused at encode time.
    #[test]
    fn cir_model_rejects_unsorted_duplicate_objects() {
        let cir = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: vec![
                CirObject {
                    rsync_uri: "rsync://example.net/repo/z.roa".to_string(),
                    sha256: vec![0x11; 32],
                },
                CirObject {
                    rsync_uri: "rsync://example.net/repo/a.roa".to_string(),
                    sha256: vec![0x22; 32],
                },
            ],
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        };
        let err = encode_cir(&cir).expect_err("unsorted objects must fail");
        assert!(err.to_string().contains("CIR.objects"), "{err}");
    }

    // Two TALs sharing the same talUri must be refused at encode time.
    #[test]
    fn cir_model_rejects_duplicate_tals() {
        let cir = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: Vec::new(),
            tals: vec![
                CirTal {
                    tal_uri: "https://tal.example.net/root.tal".to_string(),
                    tal_bytes: b"a".to_vec(),
                },
                CirTal {
                    tal_uri: "https://tal.example.net/root.tal".to_string(),
                    tal_bytes: b"b".to_vec(),
                },
            ],
        };
        let err = encode_cir(&cir).expect_err("duplicate tals must fail");
        assert!(err.to_string().contains("CIR.tals"), "{err}");
    }

    // Locate the encoded version INTEGER (02 01 <v>) and bump it to an
    // unsupported value; the decoder must reject it.
    #[test]
    fn cir_decode_rejects_wrong_version() {
        let mut der = encode_cir(&sample_cir()).expect("encode cir");
        let pos = der
            .windows(3)
            .position(|window| window == [0x02, 0x01, CIR_VERSION_V1 as u8])
            .expect("find version integer");
        der[pos + 2] = 2;
        let err = decode_cir(&der).expect_err("wrong version must fail");
        assert!(err.to_string().contains("unexpected CIR version"), "{err}");
    }

    // Flip the last byte of the embedded SHA-256 OID; the decoder must name
    // the expected OID in its error.
    #[test]
    fn cir_decode_rejects_wrong_hash_oid() {
        let mut der = encode_cir(&sample_cir()).expect("encode cir");
        let sha256_bytes = crate::data_model::oid::OID_SHA256_RAW;
        let idx = der
            .windows(sha256_bytes.len())
            .position(|window| window == sha256_bytes)
            .expect("find sha256 oid");
        der[idx + sha256_bytes.len() - 1] ^= 0x01;
        let err = decode_cir(&der).expect_err("wrong oid must fail");
        assert!(
            err.to_string()
                .contains(crate::data_model::oid::OID_SHA256),
            "{err}"
        );
    }

    // Corrupt the trailing 'Z' of the GeneralizedTime payload.
    #[test]
    fn cir_decode_rejects_bad_generalized_time() {
        let mut der = encode_cir(&sample_cir()).expect("encode cir");
        let pos = der
            .windows(15)
            .position(|window| window == b"20260407123456Z")
            .expect("find generalized time");
        der[pos + 14] = b'X';
        let err = decode_cir(&der).expect_err("bad time must fail");
        assert!(err.to_string().contains("GeneralizedTime"), "{err}");
    }

    // Object URIs must use the rsync scheme, and at least one TAL is
    // required; both violations must fail at encode time.
    #[test]
    fn cir_model_rejects_non_rsync_object_uri_and_empty_tals() {
        let bad_object = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: vec![CirObject {
                rsync_uri: "https://example.net/repo/a.roa".to_string(),
                sha256: vec![0x11; 32],
            }],
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        };
        let err = encode_cir(&bad_object).expect_err("non-rsync object uri must fail");
        assert!(err.to_string().contains("rsync://"), "{err}");
        let no_tals = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: Vec::new(),
            tals: Vec::new(),
        };
        let err = encode_cir(&no_tals).expect_err("empty tals must fail");
        assert!(err.to_string().contains("CIR.tals must be non-empty"), "{err}");
    }

    // Model-level checks: validation time must be UTC, digests exactly 32
    // bytes, and TAL URIs http(s); each failure names its invariant.
    #[test]
    fn cir_model_rejects_non_utc_time_bad_hash_len_and_non_http_tal_uri() {
        let bad_time = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time().to_offset(time::UtcOffset::from_hms(8, 0, 0).unwrap()),
            objects: Vec::new(),
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        };
        let err = encode_cir(&bad_time).expect_err("non-utc validation time must fail");
        assert!(err.to_string().contains("UTC"), "{err}");
        let bad_hash = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: vec![CirObject {
                rsync_uri: "rsync://example.net/repo/a.roa".to_string(),
                sha256: vec![0x11; 31],
            }],
            tals: vec![CirTal {
                tal_uri: "https://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        };
        let err = encode_cir(&bad_hash).expect_err("bad digest len must fail");
        assert!(err.to_string().contains("32 bytes"), "{err}");
        let bad_tal_uri = CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            validation_time: sample_time(),
            objects: Vec::new(),
            tals: vec![CirTal {
                tal_uri: "ftp://tal.example.net/root.tal".to_string(),
                tal_bytes: b"x".to_vec(),
            }],
        };
        let err = encode_cir(&bad_tal_uri).expect_err("bad tal uri must fail");
        assert!(err.to_string().contains("http:// or https://"), "{err}");
    }

    // Strictness checks: bytes after the outer SEQUENCE, and extra fields
    // inside a CirObject SEQUENCE (built by hand via test_encode_tlv), must
    // both be rejected.
    #[test]
    fn cir_decode_rejects_trailing_bytes_and_trailing_fields() {
        let cir = sample_cir();
        let mut der = encode_cir(&cir).expect("encode cir");
        der.push(0);
        let err = decode_cir(&der).expect_err("trailing bytes after cir must fail");
        assert!(err.to_string().contains("trailing bytes after CIR"), "{err}");
        // Object SEQUENCE with a bogus trailing INTEGER field.
        let object = test_encode_tlv(
            0x30,
            &[
                test_encode_tlv(0x16, b"rsync://example.net/repo/a.roa"),
                test_encode_tlv(0x04, &[0x11; 32]),
                test_encode_tlv(0x02, &[0x01]),
            ]
            .concat(),
        );
        let tal = test_encode_tlv(
            0x30,
            &[
                test_encode_tlv(0x16, b"https://tal.example.net/root.tal"),
                test_encode_tlv(0x04, b"x"),
            ]
            .concat(),
        );
        // Full CIR: version, hash OID, time, objects SEQ, tals SEQ.
        let bad = test_encode_tlv(
            0x30,
            &[
                test_encode_tlv(0x02, &[CIR_VERSION_V1 as u8]),
                test_encode_tlv(0x06, crate::data_model::oid::OID_SHA256_RAW),
                test_encode_tlv(0x18, b"20260407123456Z"),
                test_encode_tlv(0x30, &object),
                test_encode_tlv(0x30, &tal),
            ]
            .concat(),
        );
        let err = decode_cir(&bad).expect_err("trailing field in object must fail");
        assert!(err.to_string().contains("trailing fields in CirObject"), "{err}");
    }

    // Overwrite the first byte of each embedded URI string with 0xFF to
    // break UTF-8; the decoder must report the encoding failure.
    #[test]
    fn cir_decode_rejects_invalid_object_and_tal_shapes() {
        let cir = sample_cir();
        let mut der = encode_cir(&cir).expect("encode cir");
        let rsync_text = b"rsync://example.net/repo/a.cer";
        let idx = der
            .windows(rsync_text.len())
            .position(|window| window == rsync_text)
            .expect("find object uri");
        der[idx] = 0xFF;
        let err = decode_cir(&der).expect_err("invalid utf8 object uri must fail");
        assert!(err.to_string().contains("utf-8"), "{err}");
        let mut der = encode_cir(&cir).expect("encode cir");
        let tal_text = b"https://tal.example.net/root.tal";
        let idx = der
            .windows(tal_text.len())
            .position(|window| window == tal_text)
            .expect("find tal uri");
        der[idx] = 0xFF;
        let err = decode_cir(&der).expect_err("invalid utf8 tal uri must fail");
        assert!(err.to_string().contains("utf-8"), "{err}");
    }
}

120
src/cir/model.rs Normal file
View File

@ -0,0 +1,120 @@
use crate::data_model::oid::OID_SHA256;
pub const CIR_VERSION_V1: u32 = 1;
pub const DIGEST_LEN_SHA256: usize = 32;
/// Digest algorithm used for CIR object hashes.
///
/// Only SHA-256 is defined for CIR v1.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum CirHashAlgorithm {
    Sha256,
}
impl CirHashAlgorithm {
    /// Dotted-decimal OID string identifying this algorithm.
    pub fn oid(&self) -> &'static str {
        match self {
            Self::Sha256 => OID_SHA256,
        }
    }
}
/// Canonical Input Representation (CIR) v1: a deterministic snapshot of the
/// validator's inputs (object digests plus TALs) at a given validation time.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CanonicalInputRepresentation {
    /// Must equal [`CIR_VERSION_V1`]; enforced by `validate`.
    pub version: u32,
    /// Digest algorithm for `objects[*].sha256` (SHA-256 only in v1).
    pub hash_alg: CirHashAlgorithm,
    /// Snapshot timestamp; `validate` requires a UTC offset.
    pub validation_time: time::OffsetDateTime,
    /// Objects sorted strictly ascending by `rsync_uri` (unique).
    pub objects: Vec<CirObject>,
    /// TALs sorted strictly ascending by `tal_uri` (unique, non-empty).
    pub tals: Vec<CirTal>,
}
impl CanonicalInputRepresentation {
pub fn validate(&self) -> Result<(), String> {
if self.version != CIR_VERSION_V1 {
return Err(format!(
"CIR version must be {CIR_VERSION_V1}, got {}",
self.version
));
}
if !matches!(self.hash_alg, CirHashAlgorithm::Sha256) {
return Err("CIR hashAlg must be SHA-256".into());
}
if self.validation_time.offset() != time::UtcOffset::UTC {
return Err("CIR validationTime must be UTC".into());
}
validate_sorted_unique_strings(
self.objects.iter().map(|item| item.rsync_uri.as_str()),
"CIR.objects must be sorted by rsyncUri and unique",
)?;
validate_sorted_unique_strings(
self.tals.iter().map(|item| item.tal_uri.as_str()),
"CIR.tals must be sorted by talUri and unique",
)?;
if self.tals.is_empty() {
return Err("CIR.tals must be non-empty".into());
}
for object in &self.objects {
object.validate()?;
}
for tal in &self.tals {
tal.validate()?;
}
Ok(())
}
}
/// One repository object referenced by a CIR: its rsync URI plus the
/// SHA-256 digest of its bytes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirObject {
    /// Must start with `rsync://`; enforced by `validate`.
    pub rsync_uri: String,
    /// Raw SHA-256 digest; must be exactly 32 bytes.
    pub sha256: Vec<u8>,
}
impl CirObject {
    /// Check that the URI carries the rsync scheme and the digest is a full
    /// 32-byte SHA-256 value; reports the first violation as a message.
    pub fn validate(&self) -> Result<(), String> {
        if !self.rsync_uri.starts_with("rsync://") {
            return Err(format!(
                "CirObject.rsync_uri must start with rsync://, got {}",
                self.rsync_uri
            ));
        }
        let digest_len = self.sha256.len();
        if digest_len != DIGEST_LEN_SHA256 {
            return Err(format!(
                "CirObject.sha256 must be {DIGEST_LEN_SHA256} bytes, got {}",
                digest_len
            ));
        }
        Ok(())
    }
}
/// One Trust Anchor Locator carried in a CIR.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirTal {
    /// Must start with `http://` or `https://`; enforced by `validate`.
    pub tal_uri: String,
    /// Raw TAL file bytes; must be non-empty.
    pub tal_bytes: Vec<u8>,
}
impl CirTal {
    /// Check that the TAL URI uses an http(s) scheme and that the TAL body
    /// is non-empty; reports the first violation as a message.
    pub fn validate(&self) -> Result<(), String> {
        let scheme_ok =
            self.tal_uri.starts_with("https://") || self.tal_uri.starts_with("http://");
        if !scheme_ok {
            return Err(format!(
                "CirTal.tal_uri must start with http:// or https://, got {}",
                self.tal_uri
            ));
        }
        if self.tal_bytes.is_empty() {
            return Err("CirTal.tal_bytes must be non-empty".into());
        }
        Ok(())
    }
}
/// Verify that `items` is strictly ascending (which also guarantees
/// uniqueness); returns `message` as the error on the first violation.
fn validate_sorted_unique_strings<'a>(
    items: impl IntoIterator<Item = &'a str>,
    message: &str,
) -> Result<(), String> {
    let mut iter = items.into_iter();
    // Empty and single-element sequences are trivially sorted.
    let Some(mut prev) = iter.next() else {
        return Ok(());
    };
    for key in iter {
        // Equal keys (duplicates) fail here too: strict `>` is required.
        if key <= prev {
            return Err(message.into());
        }
        prev = key;
    }
    Ok(())
}

376
src/cir/static_pool.rs Normal file
View File

@ -0,0 +1,376 @@
use std::collections::BTreeSet;
use std::fs::{self, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};
use crate::storage::{RawByHashEntry, RocksStore};
/// Errors raised while placing objects into, or exporting objects from the
/// store into, the shared content-addressed static pool.
#[derive(Debug, thiserror::Error)]
pub enum CirStaticPoolError {
    /// The supplied digest string is not 64 ASCII hex digits.
    #[error("invalid sha256 hex: {0}")]
    InvalidSha256Hex(String),
    /// Zero-length objects are refused.
    #[error("raw bytes are empty for sha256={sha256_hex}")]
    EmptyBytes { sha256_hex: String },
    /// The bytes do not hash to the digest they were submitted under.
    #[error("raw bytes do not match sha256 hex: {sha256_hex}")]
    HashMismatch { sha256_hex: String },
    // The next five variants cover filesystem failures along the
    // temp-write / fsync / hard-link publish sequence.
    #[error("create directory failed: {path}: {detail}")]
    CreateDir { path: String, detail: String },
    #[error("create temp file failed: {path}: {detail}")]
    CreateTemp { path: String, detail: String },
    #[error("write temp file failed: {path}: {detail}")]
    WriteTemp { path: String, detail: String },
    #[error("sync temp file failed: {path}: {detail}")]
    SyncTemp { path: String, detail: String },
    #[error("publish temp file failed: {temp_path} -> {final_path}: {detail}")]
    Publish {
        temp_path: String,
        final_path: String,
        detail: String,
    },
    #[error("remove temp file failed: {path}: {detail}")]
    RemoveTemp { path: String, detail: String },
    /// Export requested a hash the backing store does not hold.
    #[error("raw_by_hash entry missing for sha256={sha256_hex}")]
    MissingRawByHash { sha256_hex: String },
    /// Any other storage-layer failure, stringified.
    #[error("storage error: {0}")]
    Storage(String),
}
/// Outcome of a single static-pool write.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirStaticPoolWriteResult {
    /// Final content-addressed path of the object in the pool.
    pub final_path: PathBuf,
    /// `true` when this call created the file; `false` when it was already
    /// present (idempotent reuse).
    pub written: bool,
}
/// Aggregate counters for a bulk export into the static pool.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CirStaticPoolExportSummary {
    /// Number of distinct hashes requested (after de-duplication).
    pub unique_hashes: usize,
    /// Files newly created by this export.
    pub written_files: usize,
    /// Files that already existed and were reused as-is.
    pub reused_files: usize,
}
/// Relative pool path for an object: `<YYYYMMDD>/<hh>/<hh>/<full hash>`,
/// where the shard directories are the first two and next two hex digits.
pub fn static_pool_relative_path(
    capture_date_utc: time::Date,
    sha256_hex: &str,
) -> Result<PathBuf, CirStaticPoolError> {
    validate_sha256_hex(sha256_hex)?;
    let mut path = PathBuf::from(format_utc_date(capture_date_utc));
    path.push(&sha256_hex[0..2]);
    path.push(&sha256_hex[2..4]);
    path.push(sha256_hex);
    Ok(path)
}
/// Absolute pool path: `static_root` joined with the sharded relative path
/// produced by [`static_pool_relative_path`].
pub fn static_pool_path(
    static_root: &Path,
    capture_date_utc: time::Date,
    sha256_hex: &str,
) -> Result<PathBuf, CirStaticPoolError> {
    let relative = static_pool_relative_path(capture_date_utc, sha256_hex)?;
    Ok(static_root.join(relative))
}
/// Write `bytes` into the static pool under
/// `<static_root>/<YYYYMMDD>/<hh>/<hh>/<sha256_hex_value>`, first verifying
/// that the bytes actually hash to the declared digest.
///
/// The publish sequence is crash-safe and race-tolerant: data goes to a
/// uniquely named temp file (written and fsynced), then is linked into
/// place with `fs::hard_link`, which does not overwrite an existing
/// destination. Returns `written == false` when the object was already
/// present (either before the call or because a concurrent writer won).
pub fn write_bytes_to_static_pool(
    static_root: &Path,
    capture_date_utc: time::Date,
    sha256_hex_value: &str,
    bytes: &[u8],
) -> Result<CirStaticPoolWriteResult, CirStaticPoolError> {
    validate_sha256_hex(sha256_hex_value)?;
    if bytes.is_empty() {
        return Err(CirStaticPoolError::EmptyBytes {
            sha256_hex: sha256_hex_value.to_string(),
        });
    }
    // Recompute the digest; refuse to publish bytes under the wrong name.
    // Comparison is against the lowercased input, so mixed-case hex is OK.
    let computed = compute_sha256_hex(bytes);
    if computed != sha256_hex_value.to_ascii_lowercase() {
        return Err(CirStaticPoolError::HashMismatch {
            sha256_hex: sha256_hex_value.to_string(),
        });
    }
    let final_path = static_pool_path(static_root, capture_date_utc, sha256_hex_value)?;
    // Fast path: content-addressed files are immutable, so existing == done.
    if final_path.exists() {
        return Ok(CirStaticPoolWriteResult {
            final_path,
            written: false,
        });
    }
    let parent = final_path.parent().expect("static pool file has parent");
    fs::create_dir_all(parent).map_err(|e| CirStaticPoolError::CreateDir {
        path: parent.display().to_string(),
        detail: e.to_string(),
    })?;
    // Unique temp name so concurrent writers never collide on the temp file;
    // create_new guards against any leftover file with the same name.
    let temp_path = parent.join(format!("{sha256_hex_value}.tmp.{}", uuid::Uuid::new_v4()));
    let mut file = OpenOptions::new()
        .create_new(true)
        .write(true)
        .open(&temp_path)
        .map_err(|e| CirStaticPoolError::CreateTemp {
            path: temp_path.display().to_string(),
            detail: e.to_string(),
        })?;
    file.write_all(bytes)
        .map_err(|e| CirStaticPoolError::WriteTemp {
            path: temp_path.display().to_string(),
            detail: e.to_string(),
        })?;
    // Flush to disk before publishing so a crash cannot expose a short file.
    file.sync_all().map_err(|e| CirStaticPoolError::SyncTemp {
        path: temp_path.display().to_string(),
        detail: e.to_string(),
    })?;
    drop(file);
    // Publish: hard_link fails rather than clobbering, which is what lets us
    // distinguish "we won the race" from "someone else already published".
    match fs::hard_link(&temp_path, &final_path) {
        Ok(()) => {
            fs::remove_file(&temp_path).map_err(|e| CirStaticPoolError::RemoveTemp {
                path: temp_path.display().to_string(),
                detail: e.to_string(),
            })?;
            Ok(CirStaticPoolWriteResult {
                final_path,
                written: true,
            })
        }
        // Lost the publish race: another writer created the file between our
        // existence check and the link. The existing object is authoritative.
        Err(e) if final_path.exists() => {
            fs::remove_file(&temp_path).map_err(|remove_err| CirStaticPoolError::RemoveTemp {
                path: temp_path.display().to_string(),
                detail: remove_err.to_string(),
            })?;
            // The original link error is irrelevant once the file exists.
            let _ = e;
            Ok(CirStaticPoolWriteResult {
                final_path,
                written: false,
            })
        }
        Err(e) => Err(CirStaticPoolError::Publish {
            temp_path: temp_path.display().to_string(),
            final_path: final_path.display().to_string(),
            detail: e.to_string(),
        }),
    }
}
/// Convenience wrapper: publish a `raw_by_hash` store entry's bytes into the
/// static pool under the digest recorded in the entry itself.
pub fn write_raw_entry_to_static_pool(
    static_root: &Path,
    capture_date_utc: time::Date,
    entry: &RawByHashEntry,
) -> Result<CirStaticPoolWriteResult, CirStaticPoolError> {
    let sha256_hex = entry.sha256_hex.as_str();
    let bytes = entry.bytes.as_slice();
    write_bytes_to_static_pool(static_root, capture_date_utc, sha256_hex, bytes)
}
/// Export every requested hash from the RocksDB store into the static pool.
///
/// Hashes are lowercased and de-duplicated via a `BTreeSet`, giving a
/// deterministic export order. Fails fast if any hash has no `raw_by_hash`
/// entry; otherwise returns counts of newly written vs. reused files.
pub fn export_hashes_from_store(
    store: &RocksStore,
    static_root: &Path,
    capture_date_utc: time::Date,
    sha256_hexes: &[String],
) -> Result<CirStaticPoolExportSummary, CirStaticPoolError> {
    let unique_hashes: BTreeSet<String> = sha256_hexes
        .iter()
        .map(|item| item.to_ascii_lowercase())
        .collect();
    let mut summary = CirStaticPoolExportSummary {
        unique_hashes: unique_hashes.len(),
        written_files: 0,
        reused_files: 0,
    };
    for sha256_hex in &unique_hashes {
        let entry = store
            .get_raw_by_hash_entry(sha256_hex)
            .map_err(|e| CirStaticPoolError::Storage(e.to_string()))?
            .ok_or_else(|| CirStaticPoolError::MissingRawByHash {
                sha256_hex: sha256_hex.clone(),
            })?;
        if write_raw_entry_to_static_pool(static_root, capture_date_utc, &entry)?.written {
            summary.written_files += 1;
        } else {
            summary.reused_files += 1;
        }
    }
    Ok(summary)
}
fn format_utc_date(date: time::Date) -> String {
format!(
"{:04}{:02}{:02}",
date.year(),
u8::from(date.month()),
date.day()
)
}
/// Require exactly 64 ASCII hex digits (either case) for a SHA-256 key.
fn validate_sha256_hex(sha256_hex: &str) -> Result<(), CirStaticPoolError> {
    let well_formed =
        sha256_hex.len() == 64 && sha256_hex.chars().all(|c| c.is_ascii_hexdigit());
    if well_formed {
        Ok(())
    } else {
        Err(CirStaticPoolError::InvalidSha256Hex(sha256_hex.to_string()))
    }
}
/// Lowercase hex encoding of the SHA-256 digest of `bytes`.
fn compute_sha256_hex(bytes: &[u8]) -> String {
    use sha2::{Digest, Sha256};
    let digest = Sha256::digest(bytes);
    hex::encode(digest)
}
#[cfg(test)]
mod tests {
    //! Tests for static-pool layout, idempotent atomic writes, and bulk
    //! export from the RocksDB store.
    use super::{
        CirStaticPoolError, compute_sha256_hex, export_hashes_from_store, static_pool_relative_path,
        write_bytes_to_static_pool,
    };
    use crate::storage::{RawByHashEntry, RepositoryViewEntry, RepositoryViewState, RocksStore};
    use std::fs;

    // Fixed capture date used by all pool-path fixtures.
    fn sample_date() -> time::Date {
        time::Date::from_calendar_date(2026, time::Month::April, 7).unwrap()
    }

    // Layout check: <YYYYMMDD>/<first 2 hex>/<next 2 hex>/<full hash>.
    #[test]
    fn static_pool_relative_path_uses_date_and_hash_prefixes() {
        let path = static_pool_relative_path(
            sample_date(),
            "abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789",
        )
        .expect("relative path");
        assert_eq!(
            path,
            std::path::PathBuf::from("20260407")
                .join("ab")
                .join("cd")
                .join("abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789")
        );
    }

    // Second write of the same bytes is a no-op (written == false), and the
    // temp-file publish protocol leaves no *.tmp.* residue behind.
    #[test]
    fn write_bytes_to_static_pool_is_idempotent_and_leaves_no_temp_files() {
        let td = tempfile::tempdir().expect("tempdir");
        let bytes = b"static-pool-object";
        let sha = compute_sha256_hex(bytes);
        let first = write_bytes_to_static_pool(td.path(), sample_date(), &sha, bytes)
            .expect("first write");
        let second = write_bytes_to_static_pool(td.path(), sample_date(), &sha, bytes)
            .expect("second write");
        assert!(first.written);
        assert!(!second.written);
        assert_eq!(fs::read(&first.final_path).expect("read final"), bytes);
        let all_files: Vec<_> = walk_files(td.path());
        assert_eq!(all_files.len(), 1);
        assert!(!all_files[0]
            .file_name()
            .and_then(|name| name.to_str())
            .unwrap_or_default()
            .contains(".tmp."));
    }

    // Malformed digest strings and empty payloads are rejected up front.
    #[test]
    fn write_bytes_to_static_pool_rejects_bad_hash_and_empty_bytes() {
        let td = tempfile::tempdir().expect("tempdir");
        let err = write_bytes_to_static_pool(td.path(), sample_date(), "not-a-hash", b"x")
            .expect_err("bad hash must fail");
        assert!(matches!(err, CirStaticPoolError::InvalidSha256Hex(_)));
        let err = write_bytes_to_static_pool(
            td.path(),
            sample_date(),
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            b"",
        )
        .expect_err("empty bytes must fail");
        assert!(matches!(err, CirStaticPoolError::EmptyBytes { .. }));
    }

    // Export dedupes repeated hashes, counts reuse on re-export, and fails
    // when a requested hash has no raw_by_hash entry in the store.
    #[test]
    fn export_hashes_from_store_writes_unique_entries_and_fails_when_missing() {
        let td = tempfile::tempdir().expect("tempdir");
        let store_dir = td.path().join("db");
        let static_root = td.path().join("static");
        let store = RocksStore::open(&store_dir).expect("open rocksdb");
        let bytes = b"store-object".to_vec();
        let sha = compute_sha256_hex(&bytes);
        let mut entry = RawByHashEntry::from_bytes(sha.clone(), bytes.clone());
        entry.origin_uris.push("rsync://example.test/repo/object.cer".to_string());
        store.put_raw_by_hash_entry(&entry).expect("put raw entry");
        store
            .put_repository_view_entry(&RepositoryViewEntry {
                rsync_uri: "rsync://example.test/repo/object.cer".to_string(),
                current_hash: Some(sha.clone()),
                repository_source: Some("https://rrdp.example.test/notification.xml".to_string()),
                object_type: Some("cer".to_string()),
                state: RepositoryViewState::Present,
            })
            .expect("put repository view");
        // Same hash listed twice: only one unique entry, written once.
        let summary = export_hashes_from_store(
            &store,
            &static_root,
            sample_date(),
            &[sha.clone(), sha.clone()],
        )
        .expect("export hashes");
        assert_eq!(summary.unique_hashes, 1);
        assert_eq!(summary.written_files, 1);
        assert_eq!(summary.reused_files, 0);
        // Re-export: existing file is reused, not rewritten.
        let summary = export_hashes_from_store(&store, &static_root, sample_date(), &[sha.clone()])
            .expect("re-export hashes");
        assert_eq!(summary.unique_hashes, 1);
        assert_eq!(summary.written_files, 0);
        assert_eq!(summary.reused_files, 1);
        let err = export_hashes_from_store(
            &store,
            &static_root,
            sample_date(),
            &[String::from(
                "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
            )],
        )
        .expect_err("missing raw_by_hash must fail");
        assert!(matches!(err, CirStaticPoolError::MissingRawByHash { .. }));
    }

    // Depth-first listing of every regular file under `root`, sorted for
    // deterministic assertions.
    fn walk_files(root: &std::path::Path) -> Vec<std::path::PathBuf> {
        let mut out = Vec::new();
        let mut stack = vec![root.to_path_buf()];
        while let Some(path) = stack.pop() {
            for entry in fs::read_dir(path).expect("read_dir") {
                let entry = entry.expect("dir entry");
                let path = entry.path();
                if path.is_dir() {
                    stack.push(path);
                } else {
                    out.push(path);
                }
            }
        }
        out.sort();
        out
    }
}

View File

@ -1,4 +1,5 @@
use crate::ccr::{build_ccr_from_run, write_ccr_file};
use crate::cir::export_cir_from_run;
use std::path::{Path, PathBuf};
use crate::analysis::timing::{TimingHandle, TimingMeta, TimingMetaUpdate};
@ -32,6 +33,10 @@ pub struct CliArgs {
pub policy_path: Option<PathBuf>,
pub report_json_path: Option<PathBuf>,
pub ccr_out_path: Option<PathBuf>,
pub cir_enabled: bool,
pub cir_out_path: Option<PathBuf>,
pub cir_static_root: Option<PathBuf>,
pub cir_tal_uri: Option<String>,
pub payload_replay_archive: Option<PathBuf>,
pub payload_replay_locks: Option<PathBuf>,
pub payload_base_archive: Option<PathBuf>,
@ -41,6 +46,8 @@ pub struct CliArgs {
pub payload_delta_locks: Option<PathBuf>,
pub rsync_local_dir: Option<PathBuf>,
pub disable_rrdp: bool,
pub rsync_command: Option<PathBuf>,
pub http_timeout_secs: u64,
pub rsync_timeout_secs: u64,
@ -67,6 +74,10 @@ Options:
--policy <path> Policy TOML path (optional)
--report-json <path> Write full audit report as JSON (optional)
--ccr-out <path> Write CCR DER ContentInfo to this path (optional)
--cir-enable Export CIR after the run completes
--cir-out <path> Write CIR DER to this path (requires --cir-enable)
--cir-static-root <path> Shared static pool root for CIR export (requires --cir-enable)
--cir-tal-uri <url> Override TAL URI for CIR export when using --tal-path (optional)
--payload-replay-archive <path> Use local payload replay archive root (offline replay mode)
--payload-replay-locks <path> Use local payload replay locks.json (offline replay mode)
--payload-base-archive <path> Use local base payload archive root (offline delta replay)
@ -80,6 +91,8 @@ Options:
--ta-path <path> TA certificate DER file path (offline-friendly)
--rsync-local-dir <path> Use LocalDirRsyncFetcher rooted at this directory (offline tests)
--disable-rrdp Disable RRDP and synchronize only via rsync
--rsync-command <path> Use this rsync command instead of the default rsync binary
--http-timeout-secs <n> HTTP fetch timeout seconds (default: 20)
--rsync-timeout-secs <n> rsync I/O timeout seconds (default: 60)
--rsync-mirror-root <path> Persist rsync mirrors under this directory (default: disabled)
@ -103,6 +116,10 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let mut policy_path: Option<PathBuf> = None;
let mut report_json_path: Option<PathBuf> = None;
let mut ccr_out_path: Option<PathBuf> = None;
let mut cir_enabled: bool = false;
let mut cir_out_path: Option<PathBuf> = None;
let mut cir_static_root: Option<PathBuf> = None;
let mut cir_tal_uri: Option<String> = None;
let mut payload_replay_archive: Option<PathBuf> = None;
let mut payload_replay_locks: Option<PathBuf> = None;
let mut payload_base_archive: Option<PathBuf> = None;
@ -112,6 +129,8 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let mut payload_delta_locks: Option<PathBuf> = None;
let mut rsync_local_dir: Option<PathBuf> = None;
let mut disable_rrdp: bool = false;
let mut rsync_command: Option<PathBuf> = None;
let mut http_timeout_secs: u64 = 20;
let mut rsync_timeout_secs: u64 = 60;
let mut rsync_mirror_root: Option<PathBuf> = None;
@ -161,6 +180,24 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let v = argv.get(i).ok_or("--ccr-out requires a value")?;
ccr_out_path = Some(PathBuf::from(v));
}
"--cir-enable" => {
cir_enabled = true;
}
"--cir-out" => {
i += 1;
let v = argv.get(i).ok_or("--cir-out requires a value")?;
cir_out_path = Some(PathBuf::from(v));
}
"--cir-static-root" => {
i += 1;
let v = argv.get(i).ok_or("--cir-static-root requires a value")?;
cir_static_root = Some(PathBuf::from(v));
}
"--cir-tal-uri" => {
i += 1;
let v = argv.get(i).ok_or("--cir-tal-uri requires a value")?;
cir_tal_uri = Some(v.clone());
}
"--payload-replay-archive" => {
i += 1;
let v = argv
@ -215,6 +252,14 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
let v = argv.get(i).ok_or("--rsync-local-dir requires a value")?;
rsync_local_dir = Some(PathBuf::from(v));
}
"--disable-rrdp" => {
disable_rrdp = true;
}
"--rsync-command" => {
i += 1;
let v = argv.get(i).ok_or("--rsync-command requires a value")?;
rsync_command = Some(PathBuf::from(v));
}
"--http-timeout-secs" => {
i += 1;
let v = argv.get(i).ok_or("--http-timeout-secs requires a value")?;
@ -278,9 +323,28 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
usage()
));
}
if tal_path.is_some() && ta_path.is_none() {
if tal_path.is_some() && ta_path.is_none() && !disable_rrdp {
return Err(format!(
"--tal-path requires --ta-path (offline-friendly mode)\n\n{}",
"--tal-path requires --ta-path unless --disable-rrdp is set\n\n{}",
usage()
));
}
if cir_enabled && (cir_out_path.is_none() || cir_static_root.is_none()) {
return Err(format!(
"--cir-enable requires both --cir-out and --cir-static-root\n\n{}",
usage()
));
}
if !cir_enabled && (cir_out_path.is_some() || cir_static_root.is_some() || cir_tal_uri.is_some())
{
return Err(format!(
"--cir-out/--cir-static-root/--cir-tal-uri require --cir-enable\n\n{}",
usage()
));
}
if cir_enabled && tal_path.is_some() && cir_tal_uri.is_none() {
return Err(format!(
"CIR export in --tal-path mode requires --cir-tal-uri\n\n{}",
usage()
));
}
@ -377,6 +441,10 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
policy_path,
report_json_path,
ccr_out_path,
cir_enabled,
cir_out_path,
cir_static_root,
cir_tal_uri,
payload_replay_archive,
payload_replay_locks,
payload_base_archive,
@ -385,6 +453,8 @@ pub fn parse_args(argv: &[String]) -> Result<CliArgs, String> {
payload_delta_archive,
payload_delta_locks,
rsync_local_dir,
disable_rrdp,
rsync_command,
http_timeout_secs,
rsync_timeout_secs,
rsync_mirror_root,
@ -509,7 +579,10 @@ fn build_report(
pub fn run(argv: &[String]) -> Result<(), String> {
let args = parse_args(argv)?;
let policy = read_policy(args.policy_path.as_deref())?;
let mut policy = read_policy(args.policy_path.as_deref())?;
if args.disable_rrdp {
policy.sync_preference = crate::policy::SyncPreference::RsyncOnly;
}
let validation_time = args
.validation_time
.unwrap_or_else(time::OffsetDateTime::now_utc);
@ -767,6 +840,37 @@ pub fn run(argv: &[String]) -> Result<(), String> {
.map_err(|e| e.to_string())?
}
}
(None, Some(tal_path), None) => {
let tal_bytes = std::fs::read(tal_path)
.map_err(|e| format!("read tal failed: {}: {e}", tal_path.display()))?;
let tal_uri = args.cir_tal_uri.clone();
if let Some((_, t)) = timing.as_ref() {
crate::validation::run_tree_from_tal::run_tree_from_tal_bytes_serial_audit_with_timing(
&store,
&policy,
&tal_bytes,
tal_uri,
&http,
&rsync,
validation_time,
&config,
t,
)
.map_err(|e| e.to_string())?
} else {
crate::validation::run_tree_from_tal::run_tree_from_tal_bytes_serial_audit(
&store,
&policy,
&tal_bytes,
tal_uri,
&http,
&rsync,
validation_time,
&config,
)
.map_err(|e| e.to_string())?
}
}
_ => unreachable!("validated by parse_args"),
}
} else {
@ -776,6 +880,10 @@ pub fn run(argv: &[String]) -> Result<(), String> {
})
.map_err(|e| e.to_string())?;
let rsync = SystemRsyncFetcher::new(SystemRsyncConfig {
rsync_bin: args
.rsync_command
.clone()
.unwrap_or_else(|| PathBuf::from("rsync")),
timeout: std::time::Duration::from_secs(args.rsync_timeout_secs.max(1)),
mirror_root: args.rsync_mirror_root.clone(),
..SystemRsyncConfig::default()
@ -845,6 +953,37 @@ pub fn run(argv: &[String]) -> Result<(), String> {
.map_err(|e| e.to_string())?
}
}
(None, Some(tal_path), None) => {
let tal_bytes = std::fs::read(tal_path)
.map_err(|e| format!("read tal failed: {}: {e}", tal_path.display()))?;
let tal_uri = args.cir_tal_uri.clone();
if let Some((_, t)) = timing.as_ref() {
crate::validation::run_tree_from_tal::run_tree_from_tal_bytes_serial_audit_with_timing(
&store,
&policy,
&tal_bytes,
tal_uri,
&http,
&rsync,
validation_time,
&config,
t,
)
.map_err(|e| e.to_string())?
} else {
crate::validation::run_tree_from_tal::run_tree_from_tal_bytes_serial_audit(
&store,
&policy,
&tal_bytes,
tal_uri,
&http,
&rsync,
validation_time,
&config,
)
.map_err(|e| e.to_string())?
}
}
_ => unreachable!("validated by parse_args"),
}
};
@ -880,6 +1019,40 @@ pub fn run(argv: &[String]) -> Result<(), String> {
eprintln!("wrote CCR: {}", path.display());
}
if args.cir_enabled {
let cir_tal_uri = args
.tal_url
.clone()
.or(args.cir_tal_uri.clone())
.ok_or_else(|| "CIR export requires a TAL URI source".to_string())?;
let cir_out_path = args
.cir_out_path
.as_deref()
.expect("validated by parse_args for cir");
let cir_static_root = args
.cir_static_root
.as_deref()
.expect("validated by parse_args for cir");
let summary = export_cir_from_run(
&store,
&out.discovery.trust_anchor,
&cir_tal_uri,
validation_time,
cir_out_path,
cir_static_root,
time::OffsetDateTime::now_utc().date(),
)
.map_err(|e| e.to_string())?;
eprintln!(
"wrote CIR: {} (objects={}, tals={}, static_written={}, static_reused={})",
cir_out_path.display(),
summary.object_count,
summary.tal_count,
summary.static_pool.written_files,
summary.static_pool.reused_files
);
}
let report = build_report(&policy, validation_time, out);
if let Some(p) = args.report_json_path.as_deref() {
@ -1014,6 +1187,80 @@ mod tests {
assert_eq!(args.ccr_out_path.as_deref(), Some(std::path::Path::new("out/example.ccr")));
}
#[test]
fn parse_accepts_cir_enable_with_required_paths_and_tal_override() {
    // Offline tal-path/ta-path mode with the full CIR flag set, including the
    // explicit TAL URI override used when no --tal-url is present.
    let argv: Vec<String> = [
        "rpki",
        "--db",
        "db",
        "--tal-path",
        "x.tal",
        "--ta-path",
        "x.cer",
        "--rsync-local-dir",
        "repo",
        "--cir-enable",
        "--cir-out",
        "out/example.cir",
        "--cir-static-root",
        "out/static",
        "--cir-tal-uri",
        "https://example.test/root.tal",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();
    let args = parse_args(&argv).expect("parse args");
    assert!(args.cir_enabled);
    assert_eq!(
        args.cir_out_path.as_deref(),
        Some(std::path::Path::new("out/example.cir"))
    );
    assert_eq!(
        args.cir_static_root.as_deref(),
        Some(std::path::Path::new("out/static"))
    );
    assert_eq!(args.cir_tal_uri.as_deref(), Some("https://example.test/root.tal"));
}
#[test]
fn parse_rejects_incomplete_or_invalid_cir_flags() {
    // Small helper so each rejection case can be written as a &str slice.
    let argv_of = |items: &[&str]| -> Vec<String> {
        items.iter().map(|s| s.to_string()).collect()
    };
    // --cir-enable without --cir-static-root must be rejected.
    let err = parse_args(&argv_of(&[
        "rpki",
        "--db",
        "db",
        "--tal-url",
        "https://example.test/root.tal",
        "--cir-enable",
        "--cir-out",
        "out/example.cir",
    ]))
    .unwrap_err();
    assert!(
        err.contains("--cir-enable requires both --cir-out and --cir-static-root"),
        "{err}"
    );
    // CIR output flags without --cir-enable must be rejected.
    let err = parse_args(&argv_of(&[
        "rpki",
        "--db",
        "db",
        "--tal-url",
        "https://example.test/root.tal",
        "--cir-out",
        "out/example.cir",
    ]))
    .unwrap_err();
    assert!(err.contains("require --cir-enable"), "{err}");
    // Offline --tal-path mode needs --cir-tal-uri for CIR export.
    let err = parse_args(&argv_of(&[
        "rpki",
        "--db",
        "db",
        "--tal-path",
        "x.tal",
        "--ta-path",
        "x.cer",
        "--rsync-local-dir",
        "repo",
        "--cir-enable",
        "--cir-out",
        "out/example.cir",
        "--cir-static-root",
        "out/static",
    ]))
    .unwrap_err();
    assert!(err.contains("requires --cir-tal-uri"), "{err}");
}
#[test]
fn parse_rejects_invalid_validation_time() {
let argv = vec![
@ -1114,6 +1361,28 @@ mod tests {
assert_eq!(args.max_depth, Some(0));
}
#[test]
fn parse_accepts_tal_path_without_ta_when_disable_rrdp_is_set() {
    // --disable-rrdp lifts the usual "--tal-path requires --ta-path" rule.
    let argv: Vec<String> = [
        "rpki",
        "--db",
        "db",
        "--tal-path",
        "a.tal",
        "--disable-rrdp",
        "--rsync-command",
        "/tmp/fake-rsync",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();
    let args = parse_args(&argv).expect("parse");
    assert_eq!(args.tal_path.as_deref(), Some(Path::new("a.tal")));
    assert!(args.ta_path.is_none());
    assert!(args.disable_rrdp);
    assert_eq!(
        args.rsync_command.as_deref(),
        Some(Path::new("/tmp/fake-rsync"))
    );
}
#[test]
fn parse_accepts_payload_delta_replay_mode_with_offline_tal_and_ta() {
let argv = vec![

View File

@ -9,4 +9,5 @@ pub mod signed_object;
pub mod ta;
pub mod tal;
#[cfg(feature = "full")]
pub mod router_cert;

View File

@ -98,7 +98,8 @@ impl SystemRsyncFetcher {
}
fn module_fetch_uri(&self, rsync_base_uri: &str) -> String {
rsync_capture_scope_uri(rsync_base_uri).unwrap_or_else(|| normalize_rsync_base_uri(rsync_base_uri))
rsync_module_root_uri(rsync_base_uri)
.unwrap_or_else(|| normalize_rsync_base_uri(rsync_base_uri))
}
}
@ -171,7 +172,7 @@ impl Drop for TempDir {
}
}
fn rsync_capture_scope_uri(s: &str) -> Option<String> {
fn rsync_module_root_uri(s: &str) -> Option<String> {
let normalized = normalize_rsync_base_uri(s);
let rest = normalized.strip_prefix("rsync://")?;
let mut host_and_path = rest.splitn(2, '/');
@ -181,10 +182,8 @@ fn rsync_capture_scope_uri(s: &str) -> Option<String> {
if segments.is_empty() {
return None;
}
if segments.len() >= 4 {
segments.pop();
}
Some(format!("rsync://{authority}/{}/", segments.join("/")))
let module = segments.remove(0);
Some(format!("rsync://{authority}/{module}/"))
}
fn walk_dir_collect(
@ -283,28 +282,28 @@ mod tests {
}
#[test]
// NOTE(review): the diff render interleaved the pre-rename
// `rsync_capture_scope_uri` assertions with the new ones (two `fn` headers,
// conflicting expected values), which cannot compile. This is the
// reconstructed post-change test: every input collapses to host + first
// path segment (the rsync module root).
fn rsync_module_root_uri_returns_host_and_module_only() {
    assert_eq!(
        rsync_module_root_uri("rsync://example.net/repo/ta/ca/publication-point/"),
        Some("rsync://example.net/repo/".to_string())
    );
    assert_eq!(
        rsync_module_root_uri("rsync://example.net/repo/ta/"),
        Some("rsync://example.net/repo/".to_string())
    );
    assert_eq!(
        rsync_module_root_uri("rsync://example.net/repo/"),
        Some("rsync://example.net/repo/".to_string())
    );
    // Non-rsync schemes yield no module root.
    assert_eq!(rsync_module_root_uri("https://example.net/repo"), None);
}
#[test]
// NOTE(review): the diff render left both the old expected value
// ("rsync://example.net/repo/ta/ca/") and the new one in the same assert,
// which cannot compile. Reconstructed post-change test: the dedup key is the
// module root, not the capture scope.
fn system_rsync_dedup_key_uses_module_root() {
    let fetcher = SystemRsyncFetcher::new(SystemRsyncConfig::default());
    assert_eq!(
        fetcher.dedup_key("rsync://example.net/repo/ta/ca/publication-point/"),
        "rsync://example.net/repo/"
    );
}

View File

@ -1,4 +1,5 @@
pub mod ccr;
pub mod cir;
pub mod data_model;
#[cfg(feature = "full")]

View File

@ -558,11 +558,12 @@ fn rsync_sync_into_current_store(
download_log: Option<&DownloadLogHandle>,
) -> Result<usize, RepoSyncError> {
let started = std::time::Instant::now();
let sync_scope_uri = rsync_fetcher.dedup_key(rsync_base_uri);
crate::progress_log::emit(
"rsync_sync_start",
serde_json::json!({
"rsync_base_uri": rsync_base_uri,
"dedup_key": rsync_fetcher.dedup_key(rsync_base_uri),
"sync_scope_uri": &sync_scope_uri,
}),
);
let _s = timing
@ -613,6 +614,7 @@ fn rsync_sync_into_current_store(
"rsync_sync_fetch_done",
serde_json::json!({
"rsync_base_uri": rsync_base_uri,
"sync_scope_uri": &sync_scope_uri,
"object_count": object_count,
"bytes_total": bytes_total,
"duration_ms": started.elapsed().as_millis() as u64,
@ -625,7 +627,7 @@ fn rsync_sync_into_current_store(
drop(_p);
let existing_view = store
.list_repository_view_entries_with_prefix(rsync_base_uri)
.list_repository_view_entries_with_prefix(&sync_scope_uri)
.map_err(|e| RepoSyncError::Storage(e.to_string()))?;
let _proj = timing
@ -669,7 +671,7 @@ fn rsync_sync_into_current_store(
for entry in existing_view {
if !new_set.contains(&entry.rsync_uri) {
repository_view_entries.push(build_repository_view_withdrawn_entry(
rsync_base_uri,
&sync_scope_uri,
&entry.rsync_uri,
entry.current_hash,
));
@ -682,7 +684,7 @@ fn rsync_sync_into_current_store(
.cloned()
.ok_or_else(|| RepoSyncError::Storage(format!("missing raw_by_hash mapping for {uri}")))?;
repository_view_entries.push(build_repository_view_present_entry(
rsync_base_uri,
&sync_scope_uri,
uri,
&current_hash,
));
@ -700,6 +702,7 @@ fn rsync_sync_into_current_store(
"rsync_sync_done",
serde_json::json!({
"rsync_base_uri": rsync_base_uri,
"sync_scope_uri": &sync_scope_uri,
"object_count": object_count,
"bytes_total": bytes_total,
"duration_ms": total_duration_ms,
@ -710,6 +713,7 @@ fn rsync_sync_into_current_store(
"rsync_sync_slow",
serde_json::json!({
"rsync_base_uri": rsync_base_uri,
"sync_scope_uri": &sync_scope_uri,
"object_count": object_count,
"bytes_total": bytes_total,
"duration_ms": total_duration_ms,
@ -731,6 +735,8 @@ mod tests {
use crate::replay::delta_fetch_rsync::PayloadDeltaReplayRsyncFetcher;
use crate::replay::fetch_http::PayloadReplayHttpFetcher;
use crate::replay::fetch_rsync::PayloadReplayRsyncFetcher;
use crate::storage::RepositoryViewState;
use crate::sync::store_projection::build_repository_view_present_entry;
use crate::sync::rrdp::Fetcher as HttpFetcher;
use crate::sync::rrdp::RrdpState;
use base64::Engine;
@ -779,6 +785,62 @@ mod tests {
);
}
#[test]
fn rsync_sync_uses_fetcher_dedup_scope_for_repository_view_projection() {
    // Fetcher whose dedup scope is wider (the module root) than the requested
    // base URI; the repository-view projection must reconcile against this
    // wider scope so sibling entries outside the requested subtree are seen.
    struct ScopeFetcher;
    impl RsyncFetcher for ScopeFetcher {
        fn fetch_objects(
            &self,
            _rsync_base_uri: &str,
        ) -> Result<Vec<(String, Vec<u8>)>, RsyncFetchError> {
            // Always returns a single object under child/, regardless of input.
            Ok(vec![(
                "rsync://example.net/repo/child/a.mft".to_string(),
                b"manifest".to_vec(),
            )])
        }
        fn dedup_key(&self, _rsync_base_uri: &str) -> String {
            // Widen every request to the module root.
            "rsync://example.net/repo/".to_string()
        }
    }
    let td = tempfile::tempdir().expect("tempdir");
    let store = RocksStore::open(td.path()).expect("open rocksdb");
    // Seed a sibling entry under the module root but outside the child subtree.
    let seeded = build_repository_view_present_entry(
        "rsync://example.net/repo/",
        "rsync://example.net/repo/sibling/old.roa",
        &compute_sha256_hex(b"old"),
    );
    store
        .put_projection_batch(&[seeded], &[], &[])
        .expect("seed repository view");
    let fetcher = ScopeFetcher;
    // Sync only the child/ subtree; the fetcher's scope covers the whole module.
    let written = rsync_sync_into_current_store(
        &store,
        "rsync://example.net/repo/child/",
        &fetcher,
        None,
        None,
    )
    .expect("sync ok");
    assert_eq!(written, 1);
    let entries = store
        .list_repository_view_entries_with_prefix("rsync://example.net/repo/")
        .expect("list repository view");
    // The sibling was not present in the fetched set, so under the widened
    // scope it must be marked withdrawn.
    let sibling = entries
        .iter()
        .find(|entry| entry.rsync_uri == "rsync://example.net/repo/sibling/old.roa")
        .expect("sibling entry exists");
    assert_eq!(sibling.state, RepositoryViewState::Withdrawn);
    // The freshly fetched child object must be projected as present.
    let child = entries
        .iter()
        .find(|entry| entry.rsync_uri == "rsync://example.net/repo/child/a.mft")
        .expect("child entry exists");
    assert_eq!(child.state, RepositoryViewState::Present);
}
fn notification_xml(
session_id: &str,
serial: u64,

View File

@ -2,6 +2,7 @@ use url::Url;
use crate::data_model::ta::{TrustAnchor, TrustAnchorError};
use crate::data_model::tal::{Tal, TalDecodeError};
use crate::fetch::rsync::RsyncFetcher;
use crate::sync::rrdp::Fetcher;
use crate::validation::ca_instance::{
CaInstanceUris, CaInstanceUrisError, ca_instance_uris_from_ca_certificate,
@ -104,6 +105,108 @@ pub fn discover_root_ca_instance_from_tal(
})))
}
/// Bootstrap the root CA instance from an already-decoded TAL, trying each
/// TA URI candidate with the supplied HTTP and rsync fetchers.
///
/// rsync candidates are tried before HTTP(S) ones; the sort is stable, so
/// the TAL's own ordering is preserved within each scheme group. When every
/// candidate fails, the last candidate's error is reported.
pub fn discover_root_ca_instance_from_tal_with_fetchers(
    http_fetcher: &dyn Fetcher,
    rsync_fetcher: &dyn RsyncFetcher,
    tal: Tal,
    tal_url: Option<String>,
) -> Result<DiscoveredRootCaInstance, FromTalError> {
    if tal.ta_uris.is_empty() {
        return Err(FromTalError::NoTaUris);
    }
    // Prefer rsync URIs (key 0) over everything else (key 1).
    let mut candidates = tal.ta_uris.clone();
    candidates.sort_by_key(|uri| if uri.scheme() == "rsync" { 0 } else { 1 });
    let mut last_failure: Option<String> = None;
    for candidate in &candidates {
        // Fetch the TA certificate bytes via the scheme-appropriate fetcher.
        let ta_der = match fetch_ta_der(http_fetcher, rsync_fetcher, candidate) {
            Ok(bytes) => bytes,
            Err(e) => {
                last_failure = Some(format!("fetch {candidate} failed: {e}"));
                continue;
            }
        };
        // Bind the fetched DER against the TAL; on failure, try the next URI.
        let trust_anchor = match TrustAnchor::bind_der(tal.clone(), &ta_der, Some(candidate)) {
            Ok(ta) => ta,
            Err(e) => {
                last_failure = Some(format!("bind {candidate} failed: {e}"));
                continue;
            }
        };
        let ca_instance =
            match ca_instance_uris_from_ca_certificate(&trust_anchor.ta_certificate.rc_ca) {
                Ok(uris) => uris,
                Err(e) => {
                    last_failure = Some(format!("CA instance discovery failed: {e}"));
                    continue;
                }
            };
        return Ok(DiscoveredRootCaInstance {
            tal_url,
            trust_anchor,
            ca_instance,
        });
    }
    Err(FromTalError::TaFetch(last_failure.unwrap_or_else(|| {
        "unknown TA candidate error".to_string()
    })))
}
/// Dispatch TA fetching by URI scheme: http(s) goes through the HTTP fetcher,
/// rsync through the rsync fetcher; any other scheme is rejected.
fn fetch_ta_der(
    http_fetcher: &dyn Fetcher,
    rsync_fetcher: &dyn RsyncFetcher,
    ta_uri: &Url,
) -> Result<Vec<u8>, String> {
    let scheme = ta_uri.scheme();
    if scheme == "https" || scheme == "http" {
        http_fetcher.fetch(ta_uri.as_str())
    } else if scheme == "rsync" {
        fetch_ta_der_via_rsync(rsync_fetcher, ta_uri.as_str())
    } else {
        Err(format!("unsupported TA URI scheme: {scheme}"))
    }
}
/// Fetch a single TA object over rsync by syncing its parent directory and
/// picking the exact URI out of the fetched object set.
fn fetch_ta_der_via_rsync(
    rsync_fetcher: &dyn RsyncFetcher,
    ta_rsync_uri: &str,
) -> Result<Vec<u8>, String> {
    let parent = rsync_parent_uri(ta_rsync_uri)?;
    let fetched = rsync_fetcher
        .fetch_objects(&parent)
        .map_err(|e| e.to_string())?;
    for (uri, bytes) in fetched {
        if uri == ta_rsync_uri {
            return Ok(bytes);
        }
    }
    Err(format!(
        "TA rsync object not found in fetched subtree: {ta_rsync_uri}"
    ))
}
/// Compute the rsync directory URI (with trailing slash) that contains the
/// given rsync file URI, e.g. `rsync://h/a/b/x.cer` -> `rsync://h/a/b/`.
///
/// Errors when the URI is not rsync, lacks a host or path, or does not
/// reference a file object (a trailing `/` produces an empty last segment).
fn rsync_parent_uri(ta_rsync_uri: &str) -> Result<String, String> {
    let parsed = Url::parse(ta_rsync_uri).map_err(|e| e.to_string())?;
    if parsed.scheme() != "rsync" {
        return Err(format!("not an rsync URI: {ta_rsync_uri}"));
    }
    let host = parsed
        .host_str()
        .ok_or_else(|| format!("missing host in rsync URI: {ta_rsync_uri}"))?;
    let segments: Vec<&str> = parsed
        .path_segments()
        .ok_or_else(|| format!("missing path in rsync URI: {ta_rsync_uri}"))?
        .collect();
    // A file URI must end in a non-empty segment.
    let references_file = matches!(segments.last(), Some(last) if !last.is_empty());
    if !references_file {
        return Err(format!(
            "rsync URI must reference a file object: {ta_rsync_uri}"
        ));
    }
    // Drop the file name; keep all directory segments.
    let dir_segments = &segments[..segments.len() - 1];
    let mut parent = format!("rsync://{host}/");
    if !dir_segments.is_empty() {
        parent.push_str(&dir_segments.join("/"));
        parent.push('/');
    }
    Ok(parent)
}
pub fn discover_root_ca_instance_from_tal_and_ta_der(
tal_bytes: &[u8],
ta_der: &[u8],
@ -119,6 +222,58 @@ pub fn discover_root_ca_instance_from_tal_and_ta_der(
})
}
#[cfg(test)]
mod tests {
use super::*;
use crate::fetch::rsync::LocalDirRsyncFetcher;
#[test]
fn discover_root_ca_instance_from_tal_with_fetchers_supports_rsync_ta_uri() {
    // Checked-in APNIC TAL/TA fixtures; the TAL carries an rsync TA URI
    // alongside any https ones.
    let tal_bytes = std::fs::read(
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("tests/fixtures/tal/apnic-rfc7730-https.tal"),
    )
    .unwrap();
    let ta_der = std::fs::read(
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("tests/fixtures/ta/apnic-ta.cer"),
    )
    .unwrap();
    let tal = Tal::decode_bytes(&tal_bytes).unwrap();
    let rsync_uri = tal
        .ta_uris
        .iter()
        .find(|uri| uri.scheme() == "rsync")
        .unwrap()
        .clone();
    // Mirror the TA certificate into a local tree shaped like <host>/repository/
    // so the local-dir rsync fetcher can resolve the TAL's rsync URI.
    let td = tempfile::tempdir().unwrap();
    let mirror_root = td.path().join(rsync_uri.host_str().unwrap()).join("repository");
    std::fs::create_dir_all(&mirror_root).unwrap();
    std::fs::write(
        mirror_root.join("apnic-rpki-root-iana-origin.cer"),
        ta_der,
    )
    .unwrap();
    let http = crate::fetch::http::BlockingHttpFetcher::new(
        crate::fetch::http::HttpFetcherConfig::default(),
    )
    .unwrap();
    let rsync = LocalDirRsyncFetcher::new(
        td.path().join(rsync_uri.host_str().unwrap()).join("repository"),
    );
    // Discovery sorts rsync candidates first, so it must resolve via rsync
    // without touching the network.
    let discovery = discover_root_ca_instance_from_tal_with_fetchers(&http, &rsync, tal, None)
        .expect("discover via rsync TA");
    assert!(discovery
        .trust_anchor
        .resolved_ta_uri
        .unwrap()
        .as_str()
        .starts_with("rsync://"));
}
}
pub fn run_root_from_tal_url_once(
store: &crate::storage::RocksStore,
policy: &crate::policy::Policy,

View File

@ -15,6 +15,7 @@ use crate::replay::fetch_rsync::PayloadReplayRsyncFetcher;
use crate::sync::rrdp::Fetcher;
use crate::validation::from_tal::{
DiscoveredRootCaInstance, FromTalError, discover_root_ca_instance_from_tal_and_ta_der,
discover_root_ca_instance_from_tal_with_fetchers,
discover_root_ca_instance_from_tal_url,
};
use crate::validation::tree::{
@ -295,6 +296,117 @@ pub fn run_tree_from_tal_and_ta_der_serial(
Ok(RunTreeFromTalOutput { discovery, tree })
}
/// Decode TAL bytes, discover the root CA instance via the supplied fetchers,
/// then run the serial publication-point tree walk with download auditing.
///
/// Returns the discovery result, the validated tree, per-publication-point
/// audit records, and the captured download events plus their statistics.
pub fn run_tree_from_tal_bytes_serial_audit(
    store: &crate::storage::RocksStore,
    policy: &crate::policy::Policy,
    tal_bytes: &[u8],
    tal_uri: Option<String>,
    http_fetcher: &dyn Fetcher,
    rsync_fetcher: &dyn crate::fetch::rsync::RsyncFetcher,
    validation_time: time::OffsetDateTime,
    config: &TreeRunConfig,
) -> Result<RunTreeFromTalAuditOutput, RunTreeFromTalError> {
    let decoded_tal =
        crate::data_model::tal::Tal::decode_bytes(tal_bytes).map_err(FromTalError::from)?;
    let discovery = discover_root_ca_instance_from_tal_with_fetchers(
        http_fetcher,
        rsync_fetcher,
        decoded_tal,
        tal_uri,
    )?;
    // Capture every download performed during the tree walk.
    let download_log = DownloadLogHandle::new();
    let runner = Rpkiv1PublicationPointRunner {
        store,
        policy,
        http_fetcher,
        rsync_fetcher,
        validation_time,
        timing: None,
        download_log: Some(download_log.clone()),
        replay_archive_index: None,
        replay_delta_index: None,
        rrdp_dedup: true,
        rrdp_repo_cache: Mutex::new(HashMap::new()),
        rsync_dedup: true,
        rsync_repo_cache: Mutex::new(HashMap::new()),
    };
    let root = root_handle_from_trust_anchor(
        &discovery.trust_anchor,
        derive_tal_id(&discovery),
        None,
        &discovery.ca_instance,
    );
    let TreeRunAuditOutput {
        tree,
        publication_points,
    } = run_tree_serial_audit(root, &runner, config)?;
    let downloads = download_log.snapshot_events();
    let download_stats = DownloadLogHandle::stats_from_events(&downloads);
    Ok(RunTreeFromTalAuditOutput {
        discovery,
        tree,
        publication_points,
        downloads,
        download_stats,
    })
}
/// Timing-instrumented variant of `run_tree_from_tal_bytes_serial_audit`:
/// records a `tal_bootstrap` phase around TAL decode + root discovery and a
/// `tree_run` phase around the serial tree walk.
pub fn run_tree_from_tal_bytes_serial_audit_with_timing(
    store: &crate::storage::RocksStore,
    policy: &crate::policy::Policy,
    tal_bytes: &[u8],
    tal_uri: Option<String>,
    http_fetcher: &dyn Fetcher,
    rsync_fetcher: &dyn crate::fetch::rsync::RsyncFetcher,
    validation_time: time::OffsetDateTime,
    config: &TreeRunConfig,
    timing: &TimingHandle,
) -> Result<RunTreeFromTalAuditOutput, RunTreeFromTalError> {
    let discovery = {
        // Phase span: TAL decode and TA candidate discovery.
        let _bootstrap_span = timing.span_phase("tal_bootstrap");
        let decoded_tal =
            crate::data_model::tal::Tal::decode_bytes(tal_bytes).map_err(FromTalError::from)?;
        discover_root_ca_instance_from_tal_with_fetchers(
            http_fetcher,
            rsync_fetcher,
            decoded_tal,
            tal_uri,
        )?
    };
    // Capture every download performed during the tree walk.
    let download_log = DownloadLogHandle::new();
    let runner = Rpkiv1PublicationPointRunner {
        store,
        policy,
        http_fetcher,
        rsync_fetcher,
        validation_time,
        timing: Some(timing.clone()),
        download_log: Some(download_log.clone()),
        replay_archive_index: None,
        replay_delta_index: None,
        rrdp_dedup: true,
        rrdp_repo_cache: Mutex::new(HashMap::new()),
        rsync_dedup: true,
        rsync_repo_cache: Mutex::new(HashMap::new()),
    };
    let root = root_handle_from_trust_anchor(
        &discovery.trust_anchor,
        derive_tal_id(&discovery),
        None,
        &discovery.ca_instance,
    );
    let audit = {
        // Phase span: the full serial tree walk.
        let _tree_span = timing.span_phase("tree_run");
        run_tree_serial_audit(root, &runner, config)?
    };
    let TreeRunAuditOutput {
        tree,
        publication_points,
    } = audit;
    let downloads = download_log.snapshot_events();
    let download_stats = DownloadLogHandle::stats_from_events(&downloads);
    Ok(RunTreeFromTalAuditOutput {
        discovery,
        tree,
        publication_points,
        downloads,
        download_stats,
    })
}
pub fn run_tree_from_tal_and_ta_der_serial_audit(
store: &crate::storage::RocksStore,
policy: &crate::policy::Policy,

View File

@ -111,7 +111,11 @@ impl<'a> PublicationPointRunner for Rpkiv1PublicationPointRunner<'a> {
let attempted_rrdp =
self.policy.sync_preference == crate::policy::SyncPreference::RrdpThenRsync;
let original_notification_uri = ca.rrdp_notification_uri.as_deref();
let mut effective_notification_uri = original_notification_uri;
let mut effective_notification_uri = if attempted_rrdp {
original_notification_uri
} else {
None
};
let mut skip_sync_due_to_dedup = false;
if attempted_rrdp && self.rrdp_dedup {
@ -4129,6 +4133,116 @@ authorityKeyIdentifier = keyid:always
assert_eq!(calls.load(Ordering::SeqCst), 1, "module-scope dedup should skip second sync");
}
#[test]
fn runner_rsync_dedup_works_in_rsync_only_mode_even_when_rrdp_notify_exists() {
    let fixture_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests/fixtures/repository/rpki.cernet.net/repo/cernet/0");
    assert!(fixture_dir.is_dir(), "fixture directory must exist");
    // Two publication points under the same rsync module; the fetcher below
    // maps both onto a single dedup scope.
    let first_base_uri = "rsync://rpki.cernet.net/repo/cernet/0/".to_string();
    let second_base_uri = "rsync://rpki.cernet.net/repo/cernet/0/sub/".to_string();
    let manifest_file = "05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft";
    let manifest_rsync_uri = format!("{first_base_uri}{manifest_file}");
    let fixture_manifest_bytes =
        std::fs::read(fixture_dir.join(manifest_file)).expect("read manifest fixture");
    let fixture_manifest =
        crate::data_model::manifest::ManifestObject::decode_der(&fixture_manifest_bytes)
            .expect("decode manifest fixture");
    // Validation time just after thisUpdate keeps the fixture manifest fresh.
    let validation_time = fixture_manifest.manifest.this_update + time::Duration::seconds(60);
    let store_dir = tempfile::tempdir().expect("store dir");
    let store = RocksStore::open(store_dir.path()).expect("open rocksdb");
    // Rsync-only preference: the RRDP notification URI set below must be ignored.
    let policy = Policy {
        sync_preference: crate::policy::SyncPreference::RsyncOnly,
        ..Policy::default()
    };
    let issuer_ca_der = std::fs::read(
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(
            "tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
        ),
    )
    .expect("read issuer ca fixture");
    let issuer_ca = ResourceCertificate::decode_der(&issuer_ca_der).expect("decode issuer ca");
    let handle = CaInstanceHandle {
        depth: 0,
        tal_id: "test-tal".to_string(),
        parent_manifest_rsync_uri: None,
        ca_certificate_der: issuer_ca_der,
        ca_certificate_rsync_uri: Some("rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer".to_string()),
        effective_ip_resources: issuer_ca.tbs.extensions.ip_resources.clone(),
        effective_as_resources: issuer_ca.tbs.extensions.as_resources.clone(),
        rsync_base_uri: first_base_uri.clone(),
        manifest_rsync_uri: manifest_rsync_uri.clone(),
        publication_point_rsync_uri: first_base_uri.clone(),
        // Present but must not be used in RsyncOnly mode.
        rrdp_notification_uri: Some("https://rrdp.example.test/notification.xml".to_string()),
    };
    // Second publication point: same CA, different subtree of the same module.
    let second_handle = CaInstanceHandle {
        rsync_base_uri: second_base_uri.clone(),
        publication_point_rsync_uri: second_base_uri.clone(),
        ..handle.clone()
    };
    // Counts fetch_objects calls and widens every base URI to one module scope.
    struct ModuleScopeRsyncFetcher {
        inner: LocalDirRsyncFetcher,
        calls: Arc<AtomicUsize>,
    }
    impl RsyncFetcher for ModuleScopeRsyncFetcher {
        fn fetch_objects(
            &self,
            rsync_base_uri: &str,
        ) -> Result<Vec<(String, Vec<u8>)>, RsyncFetchError> {
            self.calls.fetch_add(1, Ordering::SeqCst);
            self.inner.fetch_objects(rsync_base_uri)
        }
        fn dedup_key(&self, _rsync_base_uri: &str) -> String {
            "rsync://rpki.cernet.net/repo/".to_string()
        }
    }
    let calls = Arc::new(AtomicUsize::new(0));
    let rsync = ModuleScopeRsyncFetcher {
        inner: LocalDirRsyncFetcher::new(&fixture_dir),
        calls: calls.clone(),
    };
    let runner = Rpkiv1PublicationPointRunner {
        store: &store,
        policy: &policy,
        // Any HTTP use would panic/fail; RsyncOnly must never reach it.
        http_fetcher: &NeverHttpFetcher,
        rsync_fetcher: &rsync,
        validation_time,
        timing: None,
        download_log: None,
        replay_archive_index: None,
        replay_delta_index: None,
        rrdp_dedup: true,
        rrdp_repo_cache: Mutex::new(HashMap::new()),
        rsync_dedup: true,
        rsync_repo_cache: Mutex::new(HashMap::new()),
    };
    let first = runner.run_publication_point(&handle).expect("first run ok");
    assert_eq!(first.source, PublicationPointSource::Fresh);
    let second = runner
        .run_publication_point(&second_handle)
        .expect("second run ok");
    assert!(matches!(
        second.source,
        PublicationPointSource::Fresh | PublicationPointSource::VcirCurrentInstance
    ));
    // Exactly one rsync fetch proves module-scope dedup in rsync-only mode.
    assert_eq!(
        calls.load(Ordering::SeqCst),
        1,
        "rsync-only mode must deduplicate by rsync scope even when RRDP notification is present"
    );
}
#[test]
fn runner_when_repo_sync_fails_uses_current_instance_vcir_and_keeps_children_empty_for_fixture()
{

151
tests/test_cir_matrix_m9.rs Normal file
View File

@ -0,0 +1,151 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use rpki::cir::{
encode_cir, materialize_cir, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal,
CIR_VERSION_V1,
};
fn apnic_tal_path() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/tal/apnic-rfc7730-https.tal")
}
fn apnic_ta_path() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/ta/apnic-ta.cer")
}
/// Build a minimal CIR containing only the APNIC TA certificate plus its TAL,
/// returning the CIR together with the raw TA bytes (used to seed the static
/// object pool).
fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
    let tal_bytes = std::fs::read(apnic_tal_path()).expect("read tal");
    let ta_bytes = std::fs::read(apnic_ta_path()).expect("read ta");
    let tal = rpki::data_model::tal::Tal::decode_bytes(&tal_bytes).expect("decode tal");
    // The CIR object entry is keyed by the TA's rsync URI from the TAL.
    let ta_rsync_uri = tal
        .ta_uris
        .iter()
        .find(|uri| uri.scheme() == "rsync")
        .expect("tal has rsync uri")
        .as_str()
        .to_string();
    let ta_hash = {
        use sha2::{Digest, Sha256};
        Sha256::digest(&ta_bytes).to_vec()
    };
    (
        CanonicalInputRepresentation {
            version: CIR_VERSION_V1,
            hash_alg: CirHashAlgorithm::Sha256,
            // Fixed validation time keeps replay runs reproducible.
            validation_time: time::OffsetDateTime::parse(
                "2026-04-07T00:00:00Z",
                &time::format_description::well_known::Rfc3339,
            )
            .unwrap(),
            objects: vec![CirObject {
                rsync_uri: ta_rsync_uri,
                sha256: ta_hash,
            }],
            tals: vec![CirTal {
                tal_uri: "https://example.test/root.tal".to_string(),
                tal_bytes,
            }],
        },
        ta_bytes,
    )
}
/// Store `bytes` in the static pool under
/// `<root>/<date>/<h[0..2]>/<h[2..4]>/<sha256-hex>`.
fn write_static(root: &Path, date: &str, bytes: &[u8]) {
    use sha2::{Digest, Sha256};
    let digest_hex = hex::encode(Sha256::digest(bytes));
    // Two levels of hash-prefix fan-out directories.
    let target_dir = root
        .join(date)
        .join(&digest_hex[0..2])
        .join(&digest_hex[2..4]);
    std::fs::create_dir_all(&target_dir).expect("mkdir static");
    std::fs::write(target_dir.join(&digest_hex), bytes).expect("write static object");
}
/// Run the local `rpki` binary once against the materialized mirror to
/// produce the reference CCR that replay participants are compared against.
///
/// Uses the CIR rsync wrapper (via --rsync-command) so rsync URIs resolve
/// inside `mirror_root` instead of the network.
fn prepare_reference_ccr(work: &Path, cir: &CanonicalInputRepresentation, mirror_root: &Path) -> PathBuf {
    let reference_ccr = work.join("reference.ccr");
    let rpki_bin = env!("CARGO_BIN_EXE_rpki");
    let wrapper = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/cir-rsync-wrapper");
    let tal_path = apnic_tal_path();
    let ta_path = apnic_ta_path();
    let out = Command::new(rpki_bin)
        // Wrapper environment: real rsync binary plus mirror root for rewrites.
        .env("REAL_RSYNC_BIN", "/usr/bin/rsync")
        .env("CIR_MIRROR_ROOT", mirror_root)
        .args([
            "--db",
            work.join("reference-db").to_string_lossy().as_ref(),
            "--tal-path",
            tal_path.to_string_lossy().as_ref(),
            "--ta-path",
            ta_path.to_string_lossy().as_ref(),
            "--disable-rrdp",
            "--rsync-command",
            wrapper.to_string_lossy().as_ref(),
            "--validation-time",
            // Same fixed time as the CIR so both runs agree.
            &cir.validation_time
                .format(&time::format_description::well_known::Rfc3339)
                .unwrap(),
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--ccr-out",
            reference_ccr.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run reference rpki");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    reference_ccr
}
#[test]
fn cir_replay_matrix_script_matches_reference_for_all_participants() {
    // Environment-dependent integration test: skip silently when the real
    // rsync binary or the locally built third-party validators are absent.
    if !Path::new("/usr/bin/rsync").exists()
        || !Path::new("/home/yuyr/dev/rust_playground/routinator/target/debug/routinator").exists()
        || !Path::new("/home/yuyr/dev/rpki-client-9.7/build-m5/src/rpki-client").exists()
    {
        return;
    }
    let td = tempfile::tempdir().expect("tempdir");
    let static_root = td.path().join("static");
    let cir_path = td.path().join("sample.cir");
    let mirror_root = td.path().join("mirror");
    let out_dir = td.path().join("matrix-out");
    // Encode a TA-only CIR, seed the static pool, and materialize the mirror.
    let (cir, ta_bytes) = build_ta_only_cir();
    std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
    write_static(&static_root, "20260407", &ta_bytes);
    materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize");
    let reference_ccr = prepare_reference_ccr(td.path(), &cir, &mirror_root);
    // Drive the matrix script; it compares each participant to the reference.
    let script = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_replay_matrix.sh");
    let out = Command::new(script)
        .args([
            "--cir",
            cir_path.to_string_lossy().as_ref(),
            "--static-root",
            static_root.to_string_lossy().as_ref(),
            "--out-dir",
            out_dir.to_string_lossy().as_ref(),
            "--reference-ccr",
            reference_ccr.to_string_lossy().as_ref(),
            "--rpki-client-build-dir",
            "/home/yuyr/dev/rpki-client-9.7/build-m5",
            "--rpki-bin",
            env!("CARGO_BIN_EXE_rpki"),
        ])
        .output()
        .expect("run cir matrix script");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    // summary.json must report a three-participant all-match result, with
    // per-participant exit code and VRP/VAP match flags.
    let summary: serde_json::Value =
        serde_json::from_slice(&std::fs::read(out_dir.join("summary.json")).expect("read summary"))
            .expect("parse summary");
    assert_eq!(summary["allMatch"], true);
    let participants = summary["participants"].as_array().expect("participants array");
    assert_eq!(participants.len(), 3);
    for participant in participants {
        assert_eq!(participant["exitCode"], 0);
        assert_eq!(participant["match"], true);
        assert_eq!(participant["vrps"]["match"], true);
        assert_eq!(participant["vaps"]["match"], true);
    }
}

View File

@ -0,0 +1,182 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use rpki::cir::{
CIR_VERSION_V1, CanonicalInputRepresentation, CirHashAlgorithm, CirObject, CirTal, encode_cir,
materialize_cir,
};
/// Path to the bundled APNIC TAL fixture (RFC 7730, https variant).
fn apnic_tal_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("tests/fixtures/tal/apnic-rfc7730-https.tal")
}
/// Path to the bundled APNIC trust-anchor certificate fixture.
fn apnic_ta_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("tests/fixtures/ta/apnic-ta.cer")
}
/// Builds a minimal CIR containing exactly one object (the APNIC TA cert) and
/// one TAL, returning it together with the raw TA certificate bytes.
fn build_ta_only_cir() -> (CanonicalInputRepresentation, Vec<u8>) {
    use sha2::{Digest, Sha256};

    let tal_bytes = std::fs::read(apnic_tal_path()).expect("read tal");
    let ta_bytes = std::fs::read(apnic_ta_path()).expect("read ta");

    // The CIR object is keyed on the TAL's rsync URI for the TA certificate.
    let parsed_tal = rpki::data_model::tal::Tal::decode_bytes(&tal_bytes).expect("decode tal");
    let ta_rsync_uri = parsed_tal
        .ta_uris
        .iter()
        .find(|uri| uri.scheme() == "rsync")
        .expect("tal has rsync uri")
        .as_str()
        .to_string();

    let ta_hash = Sha256::digest(&ta_bytes).to_vec();
    let validation_time = time::OffsetDateTime::parse(
        "2026-04-07T00:00:00Z",
        &time::format_description::well_known::Rfc3339,
    )
    .unwrap();

    let cir = CanonicalInputRepresentation {
        version: CIR_VERSION_V1,
        hash_alg: CirHashAlgorithm::Sha256,
        validation_time,
        objects: vec![CirObject { rsync_uri: ta_rsync_uri, sha256: ta_hash }],
        tals: vec![CirTal {
            tal_uri: "https://example.test/root.tal".to_string(),
            tal_bytes,
        }],
    };
    (cir, ta_bytes)
}
/// Stores `bytes` in the static pool under
/// `<root>/<date>/<hex[0..2]>/<hex[2..4]>/<full-sha256-hex>`.
fn write_static(root: &Path, date: &str, bytes: &[u8]) {
    use sha2::{Digest, Sha256};
    let digest_hex = hex::encode(Sha256::digest(bytes));
    // Two-level fan-out directories taken from the leading hash nibble pairs.
    let (first, second) = (&digest_hex[0..2], &digest_hex[2..4]);
    let target_dir = root.join(date).join(first).join(second);
    std::fs::create_dir_all(&target_dir).expect("mkdir static");
    std::fs::write(target_dir.join(&digest_hex), bytes).expect("write static object");
}
/// Runs our `rpki` binary against the materialized mirror (through the rsync
/// wrapper) to produce the reference CCR that all replay participants are
/// compared against. Returns the path of the written CCR file.
fn prepare_reference_ccr(work: &Path, cir: &CanonicalInputRepresentation, mirror_root: &Path) -> PathBuf {
    let reference_ccr = work.join("reference.ccr");
    let rpki_bin = env!("CARGO_BIN_EXE_rpki");
    let wrapper = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/cir-rsync-wrapper");
    let tal_path = apnic_tal_path();
    let ta_path = apnic_ta_path();
    // The wrapper redirects rsync:// fetches into the local mirror tree, so
    // the run is fully offline; RRDP is disabled to force the rsync path.
    let out = Command::new(rpki_bin)
        .env("REAL_RSYNC_BIN", "/usr/bin/rsync")
        .env("CIR_MIRROR_ROOT", mirror_root)
        .args([
            "--db",
            work.join("reference-db").to_string_lossy().as_ref(),
            "--tal-path",
            tal_path.to_string_lossy().as_ref(),
            "--ta-path",
            ta_path.to_string_lossy().as_ref(),
            "--disable-rrdp",
            "--rsync-command",
            wrapper.to_string_lossy().as_ref(),
            // Pin validation time to the CIR timestamp so replays are deterministic.
            "--validation-time",
            &cir.validation_time
                .format(&time::format_description::well_known::Rfc3339)
                .unwrap(),
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--ccr-out",
            reference_ccr.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run reference rpki");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    reference_ccr
}
#[test]
fn cir_routinator_script_matches_reference_on_ta_only_cir() {
    // Environment-gated: skip unless the real rsync and a local Routinator build exist.
    let required = [
        "/usr/bin/rsync",
        "/home/yuyr/dev/rust_playground/routinator/target/debug/routinator",
    ];
    if required.iter().any(|p| !Path::new(p).exists()) {
        return;
    }
    let work = tempfile::tempdir().expect("tempdir");
    let static_root = work.path().join("static");
    let cir_path = work.path().join("sample.cir");
    let mirror_root = work.path().join("mirror");
    let out_dir = work.path().join("routinator-out");

    // Materialize a TA-only CIR into the mirror tree and produce the reference CCR.
    let (cir, ta_bytes) = build_ta_only_cir();
    std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
    write_static(&static_root, "20260407", &ta_bytes);
    materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize");
    let reference_ccr = prepare_reference_ccr(work.path(), &cir, &mirror_root);

    let script =
        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_replay_routinator.sh");
    let out = Command::new(script)
        .arg("--cir")
        .arg(&cir_path)
        .arg("--static-root")
        .arg(&static_root)
        .arg("--out-dir")
        .arg(&out_dir)
        .arg("--reference-ccr")
        .arg(&reference_ccr)
        .output()
        .expect("run routinator cir script");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));

    // The script writes compare-summary.json; both VRP and VAP sets must match.
    let summary_bytes = std::fs::read(out_dir.join("compare-summary.json")).expect("read summary");
    let summary: serde_json::Value = serde_json::from_slice(&summary_bytes).expect("parse summary");
    assert_eq!(summary["vrps"]["match"], true);
    assert_eq!(summary["vaps"]["match"], true);
}
#[test]
fn cir_rpki_client_script_matches_reference_on_ta_only_cir() {
    // Environment-gated: skip unless the real rsync and a local rpki-client build exist.
    let required = [
        "/usr/bin/rsync",
        "/home/yuyr/dev/rpki-client-9.7/build-m5/src/rpki-client",
    ];
    if required.iter().any(|p| !Path::new(p).exists()) {
        return;
    }
    let work = tempfile::tempdir().expect("tempdir");
    let static_root = work.path().join("static");
    let cir_path = work.path().join("sample.cir");
    let mirror_root = work.path().join("mirror");
    let out_dir = work.path().join("rpki-client-out");

    // Materialize a TA-only CIR into the mirror tree and produce the reference CCR.
    let (cir, ta_bytes) = build_ta_only_cir();
    std::fs::write(&cir_path, encode_cir(&cir).expect("encode cir")).expect("write cir");
    write_static(&static_root, "20260407", &ta_bytes);
    materialize_cir(&cir, &static_root, &mirror_root, true).expect("materialize");
    let reference_ccr = prepare_reference_ccr(work.path(), &cir, &mirror_root);

    let script =
        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("scripts/cir/run_cir_replay_rpki_client.sh");
    let out = Command::new(script)
        .arg("--cir")
        .arg(&cir_path)
        .arg("--static-root")
        .arg(&static_root)
        .arg("--out-dir")
        .arg(&out_dir)
        .arg("--reference-ccr")
        .arg(&reference_ccr)
        .arg("--build-dir")
        .arg("/home/yuyr/dev/rpki-client-9.7/build-m5")
        .output()
        .expect("run rpki-client cir script");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));

    // The script writes compare-summary.json; both VRP and VAP sets must match.
    let summary_bytes = std::fs::read(out_dir.join("compare-summary.json")).expect("read summary");
    let summary: serde_json::Value = serde_json::from_slice(&summary_bytes).expect("parse summary");
    assert_eq!(summary["vrps"]["match"], true);
    assert_eq!(summary["vaps"]["match"], true);
}

View File

@ -0,0 +1,190 @@
use std::path::PathBuf;
use std::process::Command;
use std::os::unix::fs::MetadataExt;
/// Absolute path to the `cir-rsync-wrapper` script inside this crate.
fn wrapper_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("scripts/cir/cir-rsync-wrapper")
}
/// Locates a real rsync binary for the wrapper tests.
///
/// Honors the same `REAL_RSYNC_BIN` override the wrapper itself documents,
/// then falls back to `/usr/bin/rsync`. Returns `None` when neither exists,
/// so environment-gated tests can skip gracefully instead of failing on
/// hosts where rsync is installed elsewhere.
fn real_rsync() -> Option<String> {
    // Prefer an explicit override; ignore it if it points nowhere.
    if let Ok(from_env) = std::env::var("REAL_RSYNC_BIN") {
        if !from_env.is_empty() && std::path::Path::new(&from_env).exists() {
            return Some(from_env);
        }
    }
    let fallback = "/usr/bin/rsync";
    if std::path::Path::new(fallback).exists() {
        return Some(fallback.to_string());
    }
    None
}
#[test]
fn cir_rsync_wrapper_passes_through_help() {
    // Skip when no real rsync is available on this host.
    let Some(real) = real_rsync() else {
        return;
    };
    // A flag the wrapper does not understand must be handed straight to rsync.
    let result = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .arg("-h")
        .output()
        .expect("run wrapper -h");
    assert!(
        result.status.success(),
        "stderr={}",
        String::from_utf8_lossy(&result.stderr)
    );
    // rsync prints its help to stdout or stderr depending on version; accept either.
    let help_stdout = String::from_utf8_lossy(&result.stdout);
    let help_stderr = String::from_utf8_lossy(&result.stderr);
    assert!(help_stdout.contains("rsync") || help_stderr.contains("rsync"));
}
#[test]
fn cir_rsync_wrapper_rewrites_rsync_source_to_mirror_tree() {
    let Some(real) = real_rsync() else {
        return;
    };
    let tmp = tempfile::tempdir().expect("tempdir");
    let mirror_root = tmp.path().join("mirror");
    let dest_root = tmp.path().join("dest");
    let repo_root = mirror_root.join("example.net").join("repo");

    // Seed the mirror tree: one .roa at the module root, one non-matching file below.
    std::fs::create_dir_all(repo_root.join("nested")).expect("mkdirs");
    std::fs::write(repo_root.join("a.roa"), b"roa").expect("write roa");
    std::fs::write(repo_root.join("nested").join("b.txt"), b"txt").expect("write txt");
    std::fs::create_dir_all(&dest_root).expect("mkdir dest");

    // The rsync:// source must be rewritten to the local mirror; every other
    // flag is passed through to the real rsync untouched.
    let out = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .env("CIR_MIRROR_ROOT", &mirror_root)
        .arg("-rt")
        .arg("--address")
        .arg("127.0.0.1")
        .arg("--contimeout=10")
        .arg("--include=*/")
        .arg("--include=*.roa")
        .arg("--exclude=*")
        .arg("rsync://example.net/repo/")
        .arg(&dest_root)
        .output()
        .expect("run wrapper rewrite");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));

    // Only the .roa survives the include filters; b.txt must not be copied.
    assert_eq!(std::fs::read(dest_root.join("a.roa")).expect("read copied roa"), b"roa");
    assert!(!dest_root.join("nested").join("b.txt").exists());
}
#[test]
fn cir_rsync_wrapper_rewrites_module_root_without_trailing_slash_as_contents() {
    let Some(real) = real_rsync() else {
        return;
    };
    let tmp = tempfile::tempdir().expect("tempdir");
    let mirror_root = tmp.path().join("mirror");
    let dest_root = tmp.path().join("dest");
    let repo_root = mirror_root.join("example.net").join("repo");

    // Seed: a certificate at the module root plus a ROA one level down.
    std::fs::create_dir_all(repo_root.join("sub")).expect("mkdirs");
    std::fs::write(repo_root.join("root.cer"), b"cer").expect("write cer");
    std::fs::write(repo_root.join("sub").join("child.roa"), b"roa").expect("write roa");
    std::fs::create_dir_all(&dest_root).expect("mkdir dest");

    // Note: the source has NO trailing slash; the wrapper must still sync the
    // module's contents rather than nesting a "repo/" directory in the destination.
    let out = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .env("CIR_MIRROR_ROOT", &mirror_root)
        .arg("-rt")
        .arg("--include=*/")
        .arg("--include=*.cer")
        .arg("--include=*.roa")
        .arg("--exclude=*")
        .arg("rsync://example.net/repo")
        .arg(&dest_root)
        .output()
        .expect("run wrapper rewrite");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));

    assert_eq!(std::fs::read(dest_root.join("root.cer")).expect("read copied root cer"), b"cer");
    assert_eq!(
        std::fs::read(dest_root.join("sub").join("child.roa")).expect("read copied child roa"),
        b"roa"
    );
    assert!(!dest_root.join("repo").exists(), "module root must not be nested under destination");
}
#[test]
fn cir_rsync_wrapper_requires_mirror_root_for_rsync_source() {
    let Some(real) = real_rsync() else {
        return;
    };
    let tmp = tempfile::tempdir().expect("tempdir");
    let dest_root = tmp.path().join("dest");
    std::fs::create_dir_all(&dest_root).expect("mkdir dest");

    // An rsync:// source without CIR_MIRROR_ROOT in the environment must fail,
    // and the error message must name the missing variable.
    let out = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .arg("-rt")
        .arg("rsync://example.net/repo/")
        .arg(&dest_root)
        .output()
        .expect("run wrapper missing env");
    assert!(!out.status.success());
    let stderr = String::from_utf8_lossy(&out.stderr);
    assert!(stderr.contains("CIR_MIRROR_ROOT"));
}
#[test]
fn cir_rsync_wrapper_leaves_local_source_untouched() {
    let Some(real) = real_rsync() else {
        return;
    };
    let tmp = tempfile::tempdir().expect("tempdir");
    let src_root = tmp.path().join("src");
    let dest_root = tmp.path().join("dest");
    std::fs::create_dir_all(&src_root).expect("mkdir src");
    std::fs::create_dir_all(&dest_root).expect("mkdir dest");
    std::fs::write(src_root.join("x.cer"), b"x").expect("write source");

    // A plain local source must be forwarded to rsync unmodified; with -r and
    // no trailing slash, rsync copies the "src" directory itself.
    let out = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .arg("-rt")
        .arg(&src_root)
        .arg(&dest_root)
        .output()
        .expect("run wrapper local passthrough");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    assert_eq!(std::fs::read(dest_root.join("src").join("x.cer")).expect("read copied file"), b"x");
}
#[test]
fn cir_rsync_wrapper_local_link_mode_uses_hardlinks_for_rewritten_sources() {
    let Some(real) = real_rsync() else {
        return;
    };
    let tmp = tempfile::tempdir().expect("tempdir");
    let mirror_root = tmp.path().join("mirror");
    let dest_root = tmp.path().join("dest");
    let repo_root = mirror_root.join("example.net").join("repo");
    std::fs::create_dir_all(repo_root.join("nested")).expect("mkdirs");
    let src_file = repo_root.join("a.roa");
    let src_nested = repo_root.join("nested").join("b.cer");
    std::fs::write(&src_file, b"roa").expect("write roa");
    std::fs::write(&src_nested, b"cer").expect("write cer");
    std::fs::create_dir_all(&dest_root).expect("mkdir dest");

    // CIR_LOCAL_LINK_MODE=1 makes the wrapper use the local hardlink helper
    // instead of invoking the real rsync for rewritten sources.
    let out = Command::new(wrapper_path())
        .env("REAL_RSYNC_BIN", real)
        .env("CIR_MIRROR_ROOT", &mirror_root)
        .env("CIR_LOCAL_LINK_MODE", "1")
        .arg("-rt")
        .arg("--delete")
        .arg("--include=*/")
        .arg("--include=*.roa")
        .arg("--include=*.cer")
        .arg("--exclude=*")
        .arg("rsync://example.net/repo/")
        .arg(&dest_root)
        .output()
        .expect("run wrapper local-link mode");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));

    let dst_file = dest_root.join("a.roa");
    let dst_nested = dest_root.join("nested").join("b.cer");
    assert_eq!(std::fs::read(&dst_file).expect("read dest roa"), b"roa");
    assert_eq!(std::fs::read(&dst_nested).expect("read dest cer"), b"cer");

    // A shared inode proves the helper hardlinked instead of copying.
    let src_meta = std::fs::metadata(&src_file).expect("src metadata");
    let dst_meta = std::fs::metadata(&dst_file).expect("dst metadata");
    assert_eq!(src_meta.ino(), dst_meta.ino(), "expected hardlinked destination file");
}

View File

@ -1,3 +1,5 @@
use std::process::Command;
#[test]
fn cli_run_offline_mode_executes_and_writes_json_and_ccr() {
let db_dir = tempfile::tempdir().expect("db tempdir");
@ -80,3 +82,168 @@ fn cli_run_offline_mode_writes_decodable_ccr() {
let ccr = rpki::ccr::decode_content_info(&bytes).expect("decode ccr");
assert!(ccr.content.tas.is_some());
}
#[test]
fn cli_run_offline_mode_writes_cir_and_static_pool() {
    let db_dir = tempfile::tempdir().expect("db tempdir");
    let repo_dir = tempfile::tempdir().expect("repo tempdir");
    let out_dir = tempfile::tempdir().expect("out tempdir");
    let cir_path = out_dir.path().join("result.cir");
    let static_root = out_dir.path().join("static");
    let policy_path = out_dir.path().join("policy.toml");
    std::fs::write(&policy_path, "sync_preference = \"rsync_only\"\n").expect("write policy");
    let tal_path = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests/fixtures/tal/apnic-rfc7730-https.tal");
    let ta_path =
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/ta/apnic-ta.cer");

    // Assemble the CLI argv: offline rsync-local run with CIR emission enabled.
    let mut argv: Vec<String> = vec!["rpki".to_string()];
    for (flag, value) in [
        ("--db", db_dir.path().to_string_lossy().to_string()),
        ("--policy", policy_path.to_string_lossy().to_string()),
        ("--tal-path", tal_path.to_string_lossy().to_string()),
        ("--ta-path", ta_path.to_string_lossy().to_string()),
        ("--rsync-local-dir", repo_dir.path().to_string_lossy().to_string()),
        ("--max-depth", "0".to_string()),
        ("--max-instances", "1".to_string()),
    ] {
        argv.push(flag.to_string());
        argv.push(value);
    }
    argv.push("--cir-enable".to_string());
    for (flag, value) in [
        ("--cir-out", cir_path.to_string_lossy().to_string()),
        ("--cir-static-root", static_root.to_string_lossy().to_string()),
        ("--cir-tal-uri", "https://example.test/root.tal".to_string()),
    ] {
        argv.push(flag.to_string());
        argv.push(value);
    }
    rpki::cli::run(&argv).expect("cli run");

    // The emitted CIR must carry our TAL and reference the APNIC root certificate.
    let bytes = std::fs::read(&cir_path).expect("read cir");
    let cir = rpki::cir::decode_cir(&bytes).expect("decode cir");
    assert_eq!(cir.tals.len(), 1);
    assert_eq!(cir.tals[0].tal_uri, "https://example.test/root.tal");
    assert!(cir
        .objects
        .iter()
        .any(|item| item.rsync_uri.contains("apnic-rpki-root-iana-origin.cer")));

    // Walk the static pool and require at least one materialized object file.
    let mut file_count = 0usize;
    let mut pending = vec![static_root.clone()];
    while let Some(dir) = pending.pop() {
        for entry in std::fs::read_dir(dir).expect("read_dir") {
            let entry_path = entry.expect("entry").path();
            if entry_path.is_dir() {
                pending.push(entry_path);
            } else {
                file_count += 1;
            }
        }
    }
    assert!(file_count >= 1);
}
/// Blackbox replay parity: running without `--ta-path` (so the TA is fetched
/// through the rsync wrapper from the local mirror) must yield a CCR identical
/// to a reference run that received the TA certificate explicitly.
#[test]
fn cli_run_blackbox_rsync_wrapper_mode_matches_reference_ccr_without_ta_path() {
    // Environment-gated: needs a real rsync for the wrapper to delegate to.
    let real_rsync = std::path::Path::new("/usr/bin/rsync");
    if !real_rsync.exists() {
        return;
    }
    let db_dir = tempfile::tempdir().expect("db tempdir");
    let out_dir = tempfile::tempdir().expect("out tempdir");
    let mirror_root = out_dir.path().join("mirror");
    let ref_ccr_path = out_dir.path().join("reference.ccr");
    let actual_ccr_path = out_dir.path().join("actual.ccr");
    let tal_path = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests/fixtures/tal/apnic-rfc7730-https.tal");
    let ta_path =
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/ta/apnic-ta.cer");
    let ta_bytes = std::fs::read(&ta_path).expect("read ta");
    // Stage the TA certificate in the mirror tree at the location the APNIC
    // TAL's rsync URI points to, so the wrapper can "fetch" it locally.
    std::fs::create_dir_all(mirror_root.join("rpki.apnic.net").join("repository"))
        .expect("mkdir mirror");
    std::fs::write(
        mirror_root
            .join("rpki.apnic.net")
            .join("repository")
            .join("apnic-rpki-root-iana-origin.cer"),
        ta_bytes,
    )
    .expect("write ta into mirror");
    let bin = env!("CARGO_BIN_EXE_rpki");
    let wrapper = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("scripts/cir/cir-rsync-wrapper");
    // Reference run: TA supplied explicitly via --ta-path.
    let reference = Command::new(bin)
        .env("REAL_RSYNC_BIN", real_rsync)
        .env("CIR_MIRROR_ROOT", &mirror_root)
        .args([
            "--db",
            db_dir.path().join("reference-db").to_string_lossy().as_ref(),
            "--tal-path",
            tal_path.to_string_lossy().as_ref(),
            "--ta-path",
            ta_path.to_string_lossy().as_ref(),
            "--disable-rrdp",
            "--rsync-command",
            wrapper.to_string_lossy().as_ref(),
            "--validation-time",
            "2026-04-07T00:00:00Z",
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--ccr-out",
            ref_ccr_path.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run reference wrapper mode");
    assert!(reference.status.success(), "stderr={}", String::from_utf8_lossy(&reference.stderr));
    // Blackbox run: identical invocation except --ta-path is omitted, forcing
    // the TA to be retrieved through the wrapper from the mirror.
    let out = Command::new(bin)
        .env("REAL_RSYNC_BIN", real_rsync)
        .env("CIR_MIRROR_ROOT", &mirror_root)
        .args([
            "--db",
            db_dir.path().join("actual-db").to_string_lossy().as_ref(),
            "--tal-path",
            tal_path.to_string_lossy().as_ref(),
            "--disable-rrdp",
            "--rsync-command",
            wrapper.to_string_lossy().as_ref(),
            "--validation-time",
            "2026-04-07T00:00:00Z",
            "--max-depth",
            "0",
            "--max-instances",
            "1",
            "--ccr-out",
            actual_ccr_path.to_string_lossy().as_ref(),
        ])
        .output()
        .expect("run blackbox wrapper mode");
    assert!(out.status.success(), "stderr={}", String::from_utf8_lossy(&out.stderr));
    // Field-by-field comparison of the decoded CCRs: every section must agree.
    let reference = rpki::ccr::decode_content_info(&std::fs::read(&ref_ccr_path).unwrap())
        .expect("decode reference ccr");
    let actual = rpki::ccr::decode_content_info(&std::fs::read(&actual_ccr_path).unwrap())
        .expect("decode actual ccr");
    assert_eq!(actual.content.version, reference.content.version);
    assert_eq!(actual.content.hash_alg, reference.content.hash_alg);
    assert_eq!(actual.content.mfts, reference.content.mfts);
    assert_eq!(actual.content.vrps, reference.content.vrps);
    assert_eq!(actual.content.vaps, reference.content.vaps);
    assert_eq!(actual.content.tas, reference.content.tas);
    assert_eq!(actual.content.rks, reference.content.rks);
}