#!/usr/bin/env bash
#
# Replay a recorded APNIC payload delta through the `rpki` validator, then:
#   1. write a meta JSON + markdown summary derived from the report,
#   2. export the resulting VRPs as a Routinator-style CSV,
#   3. optionally diff that CSV against a recorded Routinator CSV.
#
# Every input and output path is overridable via environment variables;
# the defaults point at a specific recorded demo run under target/live/.
set -euo pipefail

# Resolve the repository root relative to this script so it can be invoked
# from any working directory.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT_DIR"

# --- Inputs: recorded delta artifacts, TAL/TA fixtures, tuning knobs -----
DELTA_ROOT="${DELTA_ROOT:-$ROOT_DIR/target/live/apnic_delta_demo/20260315-170223-autoplay}"
TAL_PATH="${TAL_PATH:-$ROOT_DIR/tests/fixtures/tal/apnic-rfc7730-https.tal}"
TA_PATH="${TA_PATH:-$ROOT_DIR/tests/fixtures/ta/apnic-ta.cer}"
PAYLOAD_BASE_ARCHIVE="${PAYLOAD_BASE_ARCHIVE:-$DELTA_ROOT/base-payload-archive}"
PAYLOAD_BASE_LOCKS="${PAYLOAD_BASE_LOCKS:-$DELTA_ROOT/base-locks.json}"
PAYLOAD_DELTA_ARCHIVE="${PAYLOAD_DELTA_ARCHIVE:-$DELTA_ROOT/payload-delta-archive}"
PAYLOAD_DELTA_LOCKS="${PAYLOAD_DELTA_LOCKS:-$DELTA_ROOT/locks-delta.json}"
VALIDATION_TIME="${VALIDATION_TIME:-2026-03-15T10:00:00Z}"
# NOTE(review): this is only recorded in the meta JSON below; it is never
# passed to the validator command — confirm that is intended.
PAYLOAD_BASE_VALIDATION_TIME="${PAYLOAD_BASE_VALIDATION_TIME:-}"
TRUST_ANCHOR="${TRUST_ANCHOR:-apnic}"
ROUTINATOR_RECORD_CSV="${ROUTINATOR_RECORD_CSV:-$DELTA_ROOT/record-delta.csv}"
# Optional limits; empty means "do not pass the flag".
MAX_DEPTH="${MAX_DEPTH:-}"
MAX_INSTANCES="${MAX_INSTANCES:-}"

# --- Outputs: one timestamped artifact set per run ------------------------
OUT_DIR="${OUT_DIR:-$ROOT_DIR/target/live/payload_delta_replay_runs}"
mkdir -p "$OUT_DIR"
TS="$(date -u +%Y%m%dT%H%M%SZ)"
RUN_NAME="${RUN_NAME:-apnic_delta_replay_${TS}}"
DB_DIR="${DB_DIR:-$OUT_DIR/${RUN_NAME}_db}"
REPORT_JSON="${REPORT_JSON:-$OUT_DIR/${RUN_NAME}_report.json}"
RUN_LOG="${RUN_LOG:-$OUT_DIR/${RUN_NAME}_run.log}"
META_JSON="${META_JSON:-$OUT_DIR/${RUN_NAME}_meta.json}"
SUMMARY_MD="${SUMMARY_MD:-$OUT_DIR/${RUN_NAME}_summary.md}"
VRPS_CSV="${VRPS_CSV:-$OUT_DIR/${RUN_NAME}_vrps.csv}"
COMPARE_SUMMARY_MD="${COMPARE_SUMMARY_MD:-$OUT_DIR/${RUN_NAME}_compare_summary.md}"
ONLY_IN_OURS_CSV="${ONLY_IN_OURS_CSV:-$OUT_DIR/${RUN_NAME}_only_in_ours.csv}"
ONLY_IN_RECORD_CSV="${ONLY_IN_RECORD_CSV:-$OUT_DIR/${RUN_NAME}_only_in_record.csv}"

# --- Build the validator command as an array (safe with spaces) -----------
cmd=(
  cargo run --release --bin rpki --
  --db "$DB_DIR"
  --tal-path "$TAL_PATH"
  --ta-path "$TA_PATH"
  --payload-base-archive "$PAYLOAD_BASE_ARCHIVE"
  --payload-base-locks "$PAYLOAD_BASE_LOCKS"
  --payload-delta-archive "$PAYLOAD_DELTA_ARCHIVE"
  --payload-delta-locks "$PAYLOAD_DELTA_LOCKS"
  --validation-time "$VALIDATION_TIME"
  --report-json "$REPORT_JSON"
)
if [[ -n "$MAX_DEPTH" ]]; then
  cmd+=(--max-depth "$MAX_DEPTH")
fi
if [[ -n "$MAX_INSTANCES" ]]; then
  cmd+=(--max-instances "$MAX_INSTANCES")
fi

# --- Run the validator, capturing the command line and all output ---------
run_start_s="$(date +%s)"
(
  echo "# command:"
  printf '%q ' "${cmd[@]}"
  echo
  # BUG FIX: the command used to be only echoed, never executed, so
  # $REPORT_JSON (consumed below) was never produced. Execute it here so
  # its stdout/stderr land in $RUN_LOG and a failure aborts the script.
  "${cmd[@]}"
) 2>&1 | tee "$RUN_LOG" >/dev/null
run_end_s="$(date +%s)"
run_duration_s="$((run_end_s - run_start_s))"

# --- Derive meta JSON + markdown summary from the report ------------------
# Inputs are handed to the heredoc via environment variables; the three
# positional args are the report (read) and the meta/summary paths (written).
PAYLOAD_BASE_ARCHIVE="$PAYLOAD_BASE_ARCHIVE" \
PAYLOAD_BASE_LOCKS="$PAYLOAD_BASE_LOCKS" \
PAYLOAD_DELTA_ARCHIVE="$PAYLOAD_DELTA_ARCHIVE" \
PAYLOAD_DELTA_LOCKS="$PAYLOAD_DELTA_LOCKS" \
PAYLOAD_BASE_VALIDATION_TIME="$PAYLOAD_BASE_VALIDATION_TIME" \
DB_DIR="$DB_DIR" \
REPORT_JSON="$REPORT_JSON" \
RUN_LOG="$RUN_LOG" \
VALIDATION_TIME="$VALIDATION_TIME" \
RUN_DURATION_S="$run_duration_s" \
python3 - "$REPORT_JSON" "$META_JSON" "$SUMMARY_MD" <<'PY'
import json
import os
import sys
from datetime import datetime, timezone
from pathlib import Path

report_path = Path(sys.argv[1])
meta_path = Path(sys.argv[2])
summary_path = Path(sys.argv[3])

rep = json.loads(report_path.read_text(encoding='utf-8'))
now = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')

meta = {
    'recorded_at_utc': now,
    'payload_base_archive': os.environ['PAYLOAD_BASE_ARCHIVE'],
    'payload_base_locks': os.environ['PAYLOAD_BASE_LOCKS'],
    'payload_delta_archive': os.environ['PAYLOAD_DELTA_ARCHIVE'],
    'payload_delta_locks': os.environ['PAYLOAD_DELTA_LOCKS'],
    'db_dir': os.environ['DB_DIR'],
    'report_json': os.environ['REPORT_JSON'],
    'run_log': os.environ['RUN_LOG'],
    'validation_time_arg': os.environ['VALIDATION_TIME'],
    # Base validation time falls back to the (delta) validation time when
    # the caller did not override it.
    'base_validation_time_arg': (
        os.environ.get('PAYLOAD_BASE_VALIDATION_TIME')
        or os.environ['VALIDATION_TIME']
    ),
    'durations_secs': {'rpki_run': int(os.environ['RUN_DURATION_S'])},
    'counts': {
        'publication_points_processed': rep['tree']['instances_processed'],
        'publication_points_failed': rep['tree']['instances_failed'],
        'vrps': len(rep['vrps']),
        'aspas': len(rep['aspas']),
        'audit_publication_points': len(rep['publication_points']),
    },
}
meta_path.write_text(
    json.dumps(meta, ensure_ascii=False, indent=2) + '\n', encoding='utf-8'
)

summary = []
summary.append('# Payload Delta Replay Summary\n\n')
for key in ['payload_base_archive', 'payload_base_locks',
            'payload_delta_archive', 'payload_delta_locks', 'db_dir',
            'report_json', 'base_validation_time_arg', 'validation_time_arg']:
    summary.append(f'- {key}: `{meta[key]}`\n')
summary.append('\n## Results\n\n| metric | value |\n|---|---:|\n')
for k, v in meta['counts'].items():
    summary.append(f'| {k} | {v} |\n')
summary.append('\n## Durations\n\n| step | seconds |\n|---|---:|\n')
for k, v in meta['durations_secs'].items():
    summary.append(f'| {k} | {v} |\n')
summary_path.write_text(''.join(summary), encoding='utf-8')
print(summary_path)
PY

# --- Export VRPs as Routinator-style CSV -----------------------------------
python3 scripts/payload_replay/report_to_routinator_csv.py \
  --report "$REPORT_JSON" \
  --out "$VRPS_CSV" \
  --trust-anchor "$TRUST_ANCHOR" >/dev/null

# --- Compare against a recorded Routinator CSV, if one exists --------------
if [[ -f "$ROUTINATOR_RECORD_CSV" ]]; then
  ./scripts/payload_replay/compare_with_routinator_record.sh \
    "$VRPS_CSV" \
    "$ROUTINATOR_RECORD_CSV" \
    "$COMPARE_SUMMARY_MD" \
    "$ONLY_IN_OURS_CSV" \
    "$ONLY_IN_RECORD_CSV" >/dev/null
fi

# --- Report artifact locations (stderr so stdout stays scriptable) ---------
echo "== payload delta replay run complete ==" >&2
echo "- db: $DB_DIR" >&2
echo "- report: $REPORT_JSON" >&2
echo "- run log: $RUN_LOG" >&2
echo "- meta json: $META_JSON" >&2
echo "- summary md: $SUMMARY_MD" >&2
echo "- vrps csv: $VRPS_CSV" >&2
if [[ -f "$COMPARE_SUMMARY_MD" ]]; then
  echo "- compare summary: $COMPARE_SUMMARY_MD" >&2
  echo "- only in ours: $ONLY_IN_OURS_CSV" >&2
  echo "- only in record: $ONLY_IN_RECORD_CSV" >&2
fi