rpki/tests/bench_manifest_decode_profile.rs
2026-03-04 11:12:53 +08:00

317 lines
9.7 KiB
Rust

use rpki::data_model::manifest::ManifestObject;
use std::path::{Path, PathBuf};
use std::time::Instant;
/// Default directory of `.mft` benchmark fixtures, resolved relative
/// to this crate's manifest directory at compile time.
fn default_samples_dir() -> PathBuf {
    let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    dir.push("tests/benchmark/selected_der");
    dir
}
/// Collects every `*.mft` file in `dir` as a [`Sample`], sorted by
/// file stem so run order is deterministic across platforms.
///
/// Panics if `dir` cannot be read; unreadable directory entries are
/// silently skipped.
fn read_samples(dir: &Path) -> Vec<Sample> {
    let entries = std::fs::read_dir(dir)
        .unwrap_or_else(|e| panic!("read_dir {}: {e}", dir.display()));
    let mut samples: Vec<Sample> = entries
        .flatten()
        .filter_map(|entry| {
            let path = entry.path();
            // Only DER manifests with an `.mft` extension participate.
            if path.extension().and_then(|s| s.to_str()) != Some("mft") {
                return None;
            }
            let name = path
                .file_stem()
                .and_then(|s| s.to_str())
                .unwrap_or("unknown")
                .to_string();
            Some(Sample { name, path })
        })
        .collect();
    samples.sort_by(|a, b| a.name.cmp(&b.name));
    samples
}
/// One benchmark input: a `.mft` file on disk plus its display name.
#[derive(Clone, Debug)]
struct Sample {
    // File stem of the `.mft` file; used for sorting, filtering
    // (BENCH_SAMPLE), and report rows.
    name: String,
    // Full path to the DER-encoded manifest file.
    path: PathBuf,
}
/// Reads the environment variable `name` and parses it as `u64`.
/// Falls back to `default` when the variable is unset, unreadable,
/// or not a valid `u64`.
fn env_u64(name: &str, default: u64) -> u64 {
    match std::env::var(name) {
        Ok(raw) => raw.parse::<u64>().unwrap_or(default),
        Err(_) => default,
    }
}
/// Reads the environment variable `name` and parses it as `u64`.
/// Returns `None` when the variable is unset or fails to parse.
fn env_u64_opt(name: &str) -> Option<u64> {
    match std::env::var(name) {
        Ok(raw) => raw.parse::<u64>().ok(),
        Err(_) => None,
    }
}
/// Interprets the environment variable `name` as a boolean flag.
///
/// Returns `true` for the values `1`, `true`, or `yes`, with the word
/// forms compared ASCII-case-insensitively (so `TRUE`, `True`, `Yes`,
/// `YES`, … all count). This generalizes the previous fixed list,
/// which accepted only all-lowercase or all-uppercase spellings and
/// silently rejected mixed case like `True`. Unset variables, read
/// errors, and any other value yield `false`.
fn env_bool(name: &str) -> bool {
    match std::env::var(name) {
        Ok(v) => v == "1" || v.eq_ignore_ascii_case("true") || v.eq_ignore_ascii_case("yes"),
        Err(_) => false,
    }
}
/// Reads the environment variable `name`, treating unset variables
/// and whitespace-only values as absent. The returned string keeps
/// its original (untrimmed) content.
fn env_string(name: &str) -> Option<String> {
    match std::env::var(name) {
        Ok(s) if !s.trim().is_empty() => Some(s),
        _ => None,
    }
}
/// Escapes a string for use inside a Markdown table cell: pipes are
/// backslash-escaped and newlines collapsed to single spaces.
fn escape_md(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '|' => out.push_str("\\|"),
            '\n' => out.push(' '),
            other => out.push(other),
        }
    }
    out
}
/// Ensures the parent directory of `path` exists, creating every
/// missing component. A path with no parent is a no-op. Panics on
/// filesystem errors.
fn create_parent_dirs(path: &Path) {
    let parent = match path.parent() {
        Some(p) => p,
        None => return,
    };
    if let Err(e) = std::fs::create_dir_all(parent) {
        panic!("create_dir_all {}: {e}", parent.display());
    }
}
/// Manual benchmark: decodes every selected `.mft` DER sample in a
/// tight loop and prints a Markdown table of avg ns/op and ops/s,
/// optionally writing the same report to Markdown/JSON files.
///
/// Configuration is entirely via `BENCH_*` environment variables:
/// - `BENCH_DIR`: sample directory (default: tests/benchmark/selected_der)
/// - `BENCH_SAMPLE`: restrict the run to the sample with this exact name
/// - `BENCH_ITERS`: fixed iterations per round (otherwise adaptive)
/// - `BENCH_WARMUP_ITERS`, `BENCH_ROUNDS`, `BENCH_MIN_ROUND_MS`,
///   `BENCH_MAX_ITERS`: warmup and adaptive-timing knobs
/// - `BENCH_VERBOSE`: print a detail line per round
/// - `BENCH_OUT_MD`, `BENCH_OUT_JSON`: optional report output paths
#[test]
#[ignore = "manual performance benchmark; prints Markdown table"]
fn manifest_decode_profile_benchmark_selected_der() {
    // --- read configuration from the environment ---
    let dir = env_string("BENCH_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(default_samples_dir);
    let sample_filter = env_string("BENCH_SAMPLE");
    let fixed_iters = env_u64_opt("BENCH_ITERS");
    let warmup_iters = env_u64("BENCH_WARMUP_ITERS", 100);
    let rounds = env_u64("BENCH_ROUNDS", 5);
    let min_round_ms = env_u64("BENCH_MIN_ROUND_MS", 200);
    let max_adaptive_iters = env_u64("BENCH_MAX_ITERS", 1_000_000);
    let verbose = env_bool("BENCH_VERBOSE");
    let out_md = env_string("BENCH_OUT_MD").map(|p| PathBuf::from(p));
    let out_json = env_string("BENCH_OUT_JSON").map(|p| PathBuf::from(p));
    // Fail fast on nonsensical knob values before doing any work.
    if let Some(n) = fixed_iters {
        assert!(n >= 1, "BENCH_ITERS must be >= 1");
    }
    assert!(rounds >= 1, "BENCH_ROUNDS must be >= 1");
    assert!(min_round_ms >= 1, "BENCH_MIN_ROUND_MS must be >= 1");
    assert!(max_adaptive_iters >= 1, "BENCH_MAX_ITERS must be >= 1");
    // --- discover and (optionally) filter the sample set ---
    let mut samples = read_samples(&dir);
    assert!(
        !samples.is_empty(),
        "no .mft files found under: {}",
        dir.display()
    );
    if let Some(filter) = sample_filter.as_deref() {
        samples.retain(|s| s.name == filter);
        assert!(
            !samples.is_empty(),
            "no sample matched BENCH_SAMPLE={filter}"
        );
    }
    // --- print the run header (mirrors render_markdown's layout) ---
    println!("# Manifest decode + profile validate benchmark (debug build)");
    println!();
    println!("- dir: {}", dir.display());
    if let Some(n) = fixed_iters {
        println!("- iters: {} (fixed)", n);
    } else {
        println!(
            "- warmup: {} iters, rounds: {}, min_round: {}ms (adaptive iters, max {})",
            warmup_iters, rounds, min_round_ms, max_adaptive_iters
        );
    }
    if let Some(filter) = sample_filter.as_deref() {
        println!("- sample: {}", filter);
    }
    if verbose {
        println!("- verbose: true");
    }
    if let Some(p) = out_md.as_ref() {
        println!("- out_md: {}", p.display());
    }
    if let Some(p) = out_json.as_ref() {
        println!("- out_json: {}", p.display());
    }
    println!();
    println!("Samples:");
    for s in &samples {
        println!("- {}", s.name);
    }
    println!();
    println!("| sample | file_count | avg ns/op | ops/s |");
    println!("|---|---:|---:|---:|");
    // --- measure each sample ---
    let mut rows: Vec<ResultRow> = Vec::with_capacity(samples.len());
    for s in samples {
        let bytes =
            std::fs::read(&s.path).unwrap_or_else(|e| panic!("read {}: {e}", s.path.display()));
        // Decode once up front: validates the fixture and captures
        // file_count for the report row.
        let file_count = ManifestObject::decode_der(bytes.as_slice())
            .unwrap_or_else(|e| panic!("decode {}: {e}", s.name))
            .manifest
            .file_count();
        // Warm-up: exercise the exact decode path but don't time it.
        // black_box prevents the optimizer from const-folding the work.
        for _ in 0..warmup_iters {
            let input = std::hint::black_box(bytes.as_slice());
            let decoded = ManifestObject::decode_der(input).expect("decode");
            std::hint::black_box(decoded);
        }
        let mut per_round_ns_per_op = Vec::with_capacity(rounds as usize);
        for round in 0..rounds {
            // Either a fixed iteration count, or enough iterations so
            // the timed section lasts at least min_round_ms.
            let iters = if let Some(n) = fixed_iters {
                n
            } else {
                choose_iters_adaptive(bytes.as_slice(), min_round_ms, max_adaptive_iters)
            };
            let start = Instant::now();
            for _ in 0..iters {
                let input = std::hint::black_box(bytes.as_slice());
                let decoded = ManifestObject::decode_der(input).expect("decode");
                std::hint::black_box(decoded);
            }
            let elapsed = start.elapsed();
            let total_ns = elapsed.as_secs_f64() * 1e9;
            let ns_per_op = total_ns / (iters as f64);
            per_round_ns_per_op.push(ns_per_op);
            if verbose {
                println!(
                    "# {} round {}: iters={} total_ms={:.2} ns/op={:.2}",
                    s.name,
                    round + 1,
                    iters,
                    elapsed.as_secs_f64() * 1e3,
                    ns_per_op
                );
            }
        }
        // Mean over rounds; len >= 1 is guaranteed by the rounds assert.
        let avg_ns = per_round_ns_per_op.iter().sum::<f64>() / (per_round_ns_per_op.len() as f64);
        let ops_per_sec = 1e9_f64 / avg_ns;
        println!(
            "| {} | {} | {:.2} | {:.2} |",
            s.name, file_count, avg_ns, ops_per_sec
        );
        rows.push(ResultRow {
            sample: s.name,
            file_count,
            avg_ns_per_op: avg_ns,
            ops_per_sec,
        });
    }
    // --- optionally persist the report to disk ---
    if out_md.is_some() || out_json.is_some() {
        let timestamp_utc = time::OffsetDateTime::now_utc()
            .format(&time::format_description::well_known::Rfc3339)
            .unwrap_or_else(|_| "unknown".to_string());
        let cfg = RunConfig {
            dir: dir.display().to_string(),
            sample: sample_filter,
            fixed_iters,
            warmup_iters,
            rounds,
            min_round_ms,
            max_adaptive_iters,
            timestamp_utc,
        };
        if let Some(path) = out_md {
            let md = render_markdown(&cfg, &rows);
            write_text_file(&path, &md);
            eprintln!("Wrote {}", path.display());
        }
        if let Some(path) = out_json {
            let json = serde_json::to_string_pretty(&BenchmarkOutput { config: cfg, rows })
                .expect("serialize json");
            write_text_file(&path, &json);
            eprintln!("Wrote {}", path.display());
        }
    }
}
/// Picks an iteration count for one timed round: doubles `iters`
/// until a timed batch of decodes lasts at least `min_round_ms`,
/// capping the result at `max_iters`.
///
/// The probe batches perform real decode work (with `black_box` to
/// defeat constant folding), so the decode path is also warm when the
/// caller's timed round starts.
fn choose_iters_adaptive(bytes: &[u8], min_round_ms: u64, max_iters: u64) -> u64 {
    let target = std::time::Duration::from_millis(min_round_ms);
    let mut iters: u64 = 1;
    loop {
        let started = Instant::now();
        for _ in 0..iters {
            let decoded =
                ManifestObject::decode_der(std::hint::black_box(bytes)).expect("decode");
            std::hint::black_box(decoded);
        }
        // Stop once the batch is long enough, or the cap is reached.
        if started.elapsed() >= target || iters >= max_iters {
            return iters;
        }
        iters = iters.saturating_mul(2).min(max_iters);
    }
}
/// Renders the run configuration and per-sample results as a Markdown
/// document, mirroring the table the benchmark prints to stdout.
fn render_markdown(cfg: &RunConfig, rows: &[ResultRow]) -> String {
    let mut md = String::new();
    md.push_str("# Manifest decode + profile validate benchmark (debug build)\n\n");
    md.push_str(&format!("- timestamp_utc: {}\n", cfg.timestamp_utc));
    md.push_str(&format!("- dir: `{}`\n", cfg.dir));
    if let Some(sample) = cfg.sample.as_deref() {
        md.push_str(&format!("- sample: `{}`\n", sample));
    }
    match cfg.fixed_iters {
        Some(n) => md.push_str(&format!("- iters: {} (fixed)\n", n)),
        None => md.push_str(&format!(
            "- warmup: {} iters, rounds: {}, min_round: {}ms (adaptive iters, max {})\n",
            cfg.warmup_iters, cfg.rounds, cfg.min_round_ms, cfg.max_adaptive_iters
        )),
    }
    md.push('\n');
    md.push_str("| sample | file_count | avg ns/op | ops/s |\n");
    md.push_str("|---|---:|---:|---:|\n");
    for row in rows {
        // Sample names pass through escape_md so pipes/newlines cannot
        // break the table layout.
        md.push_str(&format!(
            "| {} | {} | {:.2} | {:.2} |\n",
            escape_md(&row.sample),
            row.file_count,
            row.avg_ns_per_op,
            row.ops_per_sec
        ));
    }
    md
}
/// Writes `content` to `path`, creating any missing parent
/// directories first. Panics on I/O failure.
fn write_text_file(path: &Path, content: &str) {
    create_parent_dirs(path);
    if let Err(e) = std::fs::write(path, content) {
        panic!("write {}: {e}", path.display());
    }
}
/// Snapshot of the effective benchmark configuration, serialized into
/// the optional Markdown/JSON reports.
#[derive(Clone, Debug, serde::Serialize)]
struct RunConfig {
    // Sample directory, as a display string.
    dir: String,
    // BENCH_SAMPLE filter, if one was set.
    sample: Option<String>,
    // BENCH_ITERS when fixed; None means adaptive iteration counts.
    fixed_iters: Option<u64>,
    // Untimed warm-up iterations per sample.
    warmup_iters: u64,
    // Timed rounds per sample.
    rounds: u64,
    // Minimum duration of an adaptive round, in milliseconds.
    min_round_ms: u64,
    // Upper bound on adaptive iterations per round.
    max_adaptive_iters: u64,
    // RFC 3339 UTC timestamp of the run ("unknown" if formatting failed).
    timestamp_utc: String,
}
/// One row of the results table: aggregated timing for a single sample.
#[derive(Clone, Debug, serde::Serialize)]
struct ResultRow {
    // Sample name (file stem of the `.mft` fixture).
    sample: String,
    // Number of files listed in the decoded manifest.
    file_count: usize,
    // Mean nanoseconds per decode, averaged over all rounds.
    avg_ns_per_op: f64,
    // Derived throughput: 1e9 / avg_ns_per_op.
    ops_per_sec: f64,
}
/// Top-level shape of the JSON report: the run configuration plus one
/// result row per sample.
#[derive(Clone, Debug, serde::Serialize)]
struct BenchmarkOutput {
    config: RunConfig,
    rows: Vec<ResultRow>,
}