use rpki::bundle::record_io::load_validation_time;
use rpki::storage::RocksStore;
use rpki::validation::run_tree_from_tal::{
run_tree_from_tal_and_ta_der_payload_delta_replay_step_serial_audit,
run_tree_from_tal_and_ta_der_payload_replay_serial_audit,
};
use rpki::validation::tree::TreeRunConfig;
use serde::Serialize;
use std::fs;
use std::path::{Path, PathBuf};
use std::time::Instant;
/// Returns the one-line usage text shown on `--help`/`-h` and on any
/// argument-parsing error.
fn usage() -> &'static str {
    // Placeholders restored: the original string had lost its `<...>`
    // value markers, leaving the required flags without operands.
    "Usage: measure_sequence_replay --bundle-root <dir> \
     [--rir <rir1,rir2,...>] --out <file> [--keep-db]"
}
/// Parsed command-line options for the measurement run.
#[derive(Default)]
struct Args {
    // Root directory holding one sub-directory per RIR (required).
    bundle_root: Option<PathBuf>,
    // Explicit RIR subset; `None` means auto-discover under `bundle_root`.
    rirs: Option<Vec<String>>,
    // Output path for the JSON timing report (required).
    out: Option<PathBuf>,
    // When set, the per-RIR RocksDB scratch directories are not deleted.
    keep_db: bool,
}
fn parse_args() -> Result {
let mut out = Args::default();
let argv: Vec = std::env::args().skip(1).collect();
let mut i = 0usize;
while i < argv.len() {
match argv[i].as_str() {
"--bundle-root" => {
i += 1;
out.bundle_root = Some(PathBuf::from(
argv.get(i).ok_or("--bundle-root requires a value")?,
));
}
"--rir" => {
i += 1;
let value = argv.get(i).ok_or("--rir requires a value")?;
out.rirs = Some(
value
.split(',')
.map(|s| s.trim().to_lowercase())
.filter(|s| !s.is_empty())
.collect(),
);
}
"--out" => {
i += 1;
out.out = Some(PathBuf::from(argv.get(i).ok_or("--out requires a value")?));
}
"--keep-db" => out.keep_db = true,
"--help" | "-h" => return Err(usage().to_string()),
other => return Err(format!("unknown argument: {other}\n{}", usage())),
}
i += 1;
}
if out.bundle_root.is_none() || out.out.is_none() {
return Err(format!("--bundle-root and --out are required\n{}", usage()));
}
Ok(out)
}
/// Wall-clock duration and result sizes for one validation phase
/// (the base replay or a single delta step).
#[derive(Serialize)]
struct PhaseTiming {
    // Elapsed time of the replay call, in seconds.
    duration_seconds: f64,
    // Number of entries in `tree.vrps` after the phase.
    vrp_count: usize,
    // Number of entries in `tree.aspas` after the phase.
    vap_count: usize,
}
/// Aggregated timings for one RIR: the base-archive replay followed by
/// each delta step in sequence order.
#[derive(Serialize)]
struct RirTiming {
    // RIR directory name under --bundle-root.
    rir: String,
    // Timing of the full base-archive replay.
    base: PhaseTiming,
    // (step id, timing) pairs, in the order listed in bundle.json.
    steps: Vec<(String, PhaseTiming)>,
}
/// Scans `bundle_root` for RIR bundle directories.
///
/// A sub-directory qualifies when it contains both `bundle.json` and
/// `tal.tal`. Returns the qualifying directory names, sorted
/// lexicographically.
///
/// # Errors
/// Returns an error string when the directory cannot be read, an entry
/// cannot be inspected, or a qualifying directory name is not valid UTF-8.
fn discover_rirs(bundle_root: &Path) -> Result<Vec<String>, String> {
    let mut out = Vec::new();
    for entry in fs::read_dir(bundle_root)
        .map_err(|e| format!("read_dir failed: {}: {e}", bundle_root.display()))?
    {
        let entry = entry.map_err(|e| format!("read_dir entry failed: {e}"))?;
        let path = entry.path();
        if path.is_dir() && path.join("bundle.json").exists() && path.join("tal.tal").exists() {
            out.push(
                path.file_name()
                    .and_then(|s| s.to_str())
                    .ok_or_else(|| format!("invalid rir dir name: {}", path.display()))?
                    .to_string(),
            );
        }
    }
    // Sort so the run order (and the JSON report) is deterministic.
    out.sort();
    Ok(out)
}
/// Appends the bundle-relative path `relative` onto `root`, returning an
/// owned `PathBuf`.
fn path_join(root: &Path, relative: &str) -> PathBuf {
    let mut joined = root.to_path_buf();
    joined.push(relative);
    joined
}
fn main() {
    // Delegate to the fallible entry point; report failures on stderr and
    // exit non-zero so shell callers can detect them.
    match real_main() {
        Ok(()) => {}
        Err(message) => {
            eprintln!("{message}");
            std::process::exit(1);
        }
    }
}
/// Fallible program entry point.
///
/// For every selected RIR directory under `--bundle-root` this:
/// 1. loads `bundle.json`, `tal.tal`, and `ta.cer`;
/// 2. opens a fresh RocksDB store in a scratch directory;
/// 3. replays the base archive, timing it;
/// 4. replays each delta step in `deltaSequence.steps`, timing each;
/// 5. removes the scratch DB unless `--keep-db` was given.
///
/// Finally writes all timings as pretty-printed JSON to `--out` and
/// prints the output path to stdout.
fn real_main() -> Result<(), String> {
    let args = parse_args()?;
    // parse_args rejects invocations missing either flag, so these
    // unwraps cannot panic.
    let bundle_root = args.bundle_root.unwrap();
    let out_path = args.out.unwrap();
    let rirs = match args.rirs {
        Some(v) => v,
        None => discover_rirs(&bundle_root)?,
    };
    let mut results = Vec::new();
    // Scratch root for the per-RIR RocksDB instances, placed next to the
    // output file (or "." when --out has no parent component).
    let tmp_root = out_path
        .parent()
        .unwrap_or_else(|| Path::new("."))
        .join(".tmp-sequence-replay");
    fs::create_dir_all(&tmp_root)
        .map_err(|e| format!("create tmp root failed: {}: {e}", tmp_root.display()))?;
    for rir in rirs {
        let rir_dir = bundle_root.join(&rir);
        // bundle.json describes the base archive and the delta sequence;
        // it is parsed as untyped JSON and navigated by key below.
        let bundle: serde_json::Value = serde_json::from_slice(
            &fs::read(rir_dir.join("bundle.json"))
                .map_err(|e| format!("read bundle failed: {}: {e}", rir_dir.display()))?,
        )
        .map_err(|e| format!("parse bundle failed for {}: {e}", rir_dir.display()))?;
        let tal_bytes = fs::read(rir_dir.join("tal.tal"))
            .map_err(|e| format!("read tal.tal failed for {}: {e}", rir_dir.display()))?;
        let ta_bytes = fs::read(rir_dir.join("ta.cer"))
            .map_err(|e| format!("read ta.cer failed for {}: {e}", rir_dir.display()))?;
        // Each RIR starts from an empty store: remove any leftover DB from
        // a previous (possibly --keep-db) run before opening a fresh one.
        let db_dir = tmp_root.join(format!("{rir}-db"));
        if db_dir.exists() {
            fs::remove_dir_all(&db_dir)
                .map_err(|e| format!("remove old db failed: {}: {e}", db_dir.display()))?;
        }
        let store =
            RocksStore::open(&db_dir).map_err(|e| format!("open rocksdb failed for {rir}: {e}"))?;
        let base_archive = path_join(
            &rir_dir,
            bundle["base"]["relativeArchivePath"]
                .as_str()
                .ok_or("bundle missing base.relativeArchivePath")?,
        );
        let base_locks = path_join(
            &rir_dir,
            bundle["base"]["relativeLocksPath"]
                .as_str()
                .ok_or("bundle missing base.relativeLocksPath")?,
        );
        // Replay at the validation time recorded with the locks so the run
        // is reproducible regardless of wall-clock time.
        let base_validation_time = load_validation_time(&base_locks)
            .map_err(|e| format!("load base validation time failed for {rir}: {e}"))?;
        let start = Instant::now();
        let base_out = run_tree_from_tal_and_ta_der_payload_replay_serial_audit(
            &store,
            &rpki::policy::Policy::default(),
            &tal_bytes,
            &ta_bytes,
            None,
            &base_archive,
            &base_locks,
            base_validation_time,
            // Unbounded depth/instances; full (non-compact) audit with VCIR
            // persistence and the CCR accumulator enabled.
            &TreeRunConfig {
                max_depth: None,
                max_instances: None,
                compact_audit: false,
                persist_vcir: true,
                build_ccr_accumulator: true,
            },
        )
        .map_err(|e| format!("base replay failed for {rir}: {e}"))?;
        let base_timing = PhaseTiming {
            duration_seconds: start.elapsed().as_secs_f64(),
            vrp_count: base_out.tree.vrps.len(),
            vap_count: base_out.tree.aspas.len(),
        };
        // Delta steps chain: each replays from the previous step's lock
        // state, starting at the base locks.
        let mut previous_locks = base_locks.clone();
        let mut step_timings = Vec::new();
        for step in bundle["deltaSequence"]["steps"]
            .as_array()
            .ok_or("bundle missing deltaSequence.steps")?
        {
            let step_id = step["id"].as_str().ok_or("step missing id")?.to_string();
            let step_dir = path_join(
                &rir_dir,
                step["relativePath"]
                    .as_str()
                    .ok_or("step missing relativePath")?,
            );
            let delta_archive = path_join(
                &rir_dir,
                step["relativeArchivePath"]
                    .as_str()
                    .ok_or("step missing relativeArchivePath")?,
            );
            let delta_locks = path_join(
                &rir_dir,
                step["relativeTransitionLocksPath"]
                    .as_str()
                    .ok_or("step missing relativeTransitionLocksPath")?,
            );
            let validation_time = load_validation_time(&delta_locks).map_err(|e| {
                format!("load step validation time failed for {rir}/{step_id}: {e}")
            })?;
            // Shadows the base-phase `start`; times this step only.
            let start = Instant::now();
            let step_out = run_tree_from_tal_and_ta_der_payload_delta_replay_step_serial_audit(
                &store,
                &rpki::policy::Policy::default(),
                &tal_bytes,
                &ta_bytes,
                None,
                &delta_archive,
                &previous_locks,
                &delta_locks,
                validation_time,
                // Same configuration as the base replay.
                &TreeRunConfig {
                    max_depth: None,
                    max_instances: None,
                    compact_audit: false,
                    persist_vcir: true,
                    build_ccr_accumulator: true,
                },
            )
            .map_err(|e| format!("delta step replay failed for {rir}/{step_id}: {e}"))?;
            step_timings.push((
                step_id.clone(),
                PhaseTiming {
                    duration_seconds: start.elapsed().as_secs_f64(),
                    vrp_count: step_out.tree.vrps.len(),
                    vap_count: step_out.tree.aspas.len(),
                },
            ));
            // Advance the chain: the next step replays from this step's
            // resulting lock file. NOTE(review): assumes every step dir
            // contains target-locks.json — confirm against the bundle
            // layout produced upstream.
            previous_locks = step_dir.join("target-locks.json");
        }
        results.push(RirTiming {
            rir,
            base: base_timing,
            steps: step_timings,
        });
        // Scratch DB is deleted by default; --keep-db preserves it for
        // post-run inspection.
        if !args.keep_db && db_dir.exists() {
            fs::remove_dir_all(&db_dir)
                .map_err(|e| format!("remove db failed: {}: {e}", db_dir.display()))?;
        }
    }
    fs::write(
        &out_path,
        serde_json::to_vec_pretty(&results).map_err(|e| format!("encode json failed: {e}"))?,
    )
    .map_err(|e| format!("write out failed: {}: {e}", out_path.display()))?;
    // Echo the report path so callers can pipe it onward.
    println!("{}", out_path.display());
    Ok(())
}