fetch cache pp improved

This commit is contained in:
yuyr 2026-02-11 10:07:24 +08:00
parent 6e135b9d7a
commit 2a6a963ecd
36 changed files with 3201 additions and 265 deletions

File diff suppressed because it is too large Load Diff

View File

@ -16,13 +16,13 @@ impl Default for SyncPreference {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CaFailedFetchPolicy {
UseVerifiedCache,
UseFetchCachePp,
StopAllOutput,
}
impl Default for CaFailedFetchPolicy {
fn default() -> Self {
Self::UseVerifiedCache
Self::UseFetchCachePp
}
}

View File

@ -9,15 +9,15 @@ use sha2::Digest;
use std::collections::HashSet;
const CF_RAW_OBJECTS: &str = "raw_objects";
const CF_VERIFIED_PUBLICATION_POINTS: &str = "verified_publication_points";
const CF_FETCH_CACHE_PP: &str = "fetch_cache_pp";
const CF_RRDP_STATE: &str = "rrdp_state";
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct VerifiedKey(String);
pub struct FetchCachePpKey(String);
impl VerifiedKey {
impl FetchCachePpKey {
pub fn from_manifest_rsync_uri(manifest_rsync_uri: &str) -> Self {
Self(format!("verified:{manifest_rsync_uri}"))
Self(format!("fetch_cache_pp:{manifest_rsync_uri}"))
}
pub fn as_str(&self) -> &str {
@ -33,7 +33,7 @@ pub enum StorageError {
#[error("missing column family: {0}")]
MissingColumnFamily(&'static str),
#[error("verified publication point pack error: {0}")]
#[error("fetch_cache_pp pack error: {0}")]
Pack(#[from] PackDecodeError),
}
@ -44,7 +44,7 @@ pub struct RocksStore {
}
pub mod pack {
pub use super::{PackDecodeError, PackFile, PackTime, VerifiedPublicationPointPack};
pub use super::{FetchCachePpPack, PackDecodeError, PackFile, PackTime};
}
impl RocksStore {
@ -61,7 +61,7 @@ impl RocksStore {
let cfs = vec![
ColumnFamilyDescriptor::new(CF_RAW_OBJECTS, Options::default()),
ColumnFamilyDescriptor::new(CF_VERIFIED_PUBLICATION_POINTS, Options::default()),
ColumnFamilyDescriptor::new(CF_FETCH_CACHE_PP, Options::default()),
ColumnFamilyDescriptor::new(CF_RRDP_STATE, Options::default()),
];
@ -102,16 +102,19 @@ impl RocksStore {
Ok(())
}
pub fn put_verified(&self, key: &VerifiedKey, bytes: &[u8]) -> StorageResult<()> {
let cf = self.cf(CF_VERIFIED_PUBLICATION_POINTS)?;
pub fn put_fetch_cache_pp(&self, key: &FetchCachePpKey, bytes: &[u8]) -> StorageResult<()> {
let cf = self.cf(CF_FETCH_CACHE_PP)?;
self.db
.put_cf(cf, key.as_str().as_bytes(), bytes)
.map_err(|e| StorageError::RocksDb(e.to_string()))?;
Ok(())
}
pub fn get_verified(&self, key: &VerifiedKey) -> StorageResult<Option<Vec<u8>>> {
let cf = self.cf(CF_VERIFIED_PUBLICATION_POINTS)?;
pub fn get_fetch_cache_pp(
&self,
key: &FetchCachePpKey,
) -> StorageResult<Option<Vec<u8>>> {
let cf = self.cf(CF_FETCH_CACHE_PP)?;
let v = self
.db
.get_cf(cf, key.as_str().as_bytes())
@ -172,10 +175,10 @@ impl RocksStore {
}
#[allow(dead_code)]
pub fn verified_iter_all<'a>(
pub fn fetch_cache_pp_iter_all<'a>(
&'a self,
) -> StorageResult<impl Iterator<Item = (Box<[u8]>, Box<[u8]>)> + 'a> {
let cf = self.cf(CF_VERIFIED_PUBLICATION_POINTS)?;
let cf = self.cf(CF_FETCH_CACHE_PP)?;
let mode = IteratorMode::Start;
Ok(self.db.iterator_cf(cf, mode).filter_map(|res| res.ok()))
}
@ -213,12 +216,17 @@ fn enable_blobdb_if_supported(opts: &mut Options) {
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct VerifiedPublicationPointPack {
pub struct FetchCachePpPack {
pub format_version: u32,
pub manifest_rsync_uri: String,
pub publication_point_rsync_uri: String,
/// Manifest manifestNumber value (RFC 9286 §4.2.1).
///
/// Minimal big-endian bytes. For zero, this is `[0]`.
pub manifest_number_be: Vec<u8>,
pub this_update: PackTime,
pub next_update: PackTime,
@ -233,7 +241,7 @@ pub struct VerifiedPublicationPointPack {
pub files: Vec<PackFile>,
}
impl VerifiedPublicationPointPack {
impl FetchCachePpPack {
pub const FORMAT_VERSION_V1: u32 = 1;
pub fn encode(&self) -> StorageResult<Vec<u8>> {
@ -258,6 +266,23 @@ impl VerifiedPublicationPointPack {
if self.publication_point_rsync_uri.is_empty() {
return Err(PackDecodeError::MissingField("publication_point_rsync_uri").into());
}
if self.manifest_number_be.is_empty() {
return Err(PackDecodeError::MissingField("manifest_number_be").into());
}
if self.manifest_number_be.len() > 20 {
return Err(PackDecodeError::InvalidField {
field: "manifest_number_be",
detail: "must be at most 20 octets (RFC 9286 §4.2.1)",
}
.into());
}
if self.manifest_number_be.len() > 1 && self.manifest_number_be[0] == 0 {
return Err(PackDecodeError::InvalidField {
field: "manifest_number_be",
detail: "must be minimal big-endian (no leading zeros)",
}
.into());
}
self.this_update
.parse()
@ -354,10 +379,10 @@ impl PackTime {
#[derive(Debug, thiserror::Error)]
pub enum PackDecodeError {
#[error("encode verified publication point pack failed: {0}")]
#[error("encode fetch_cache_pp pack failed: {0}")]
Encode(String),
#[error("decode verified publication point pack failed: {0}")]
#[error("decode fetch_cache_pp pack failed: {0}")]
Decode(String),
#[error("unsupported pack format_version: {0}")]
@ -366,16 +391,22 @@ pub enum PackDecodeError {
#[error("missing required field: {0}")]
MissingField(&'static str),
#[error("missing manifest_bytes in verified pack")]
#[error("invalid field {field}: {detail}")]
InvalidField {
field: &'static str,
detail: &'static str,
},
#[error("missing manifest_bytes in fetch_cache_pp pack")]
MissingManifestBytes,
#[error("duplicate file rsync uri in verified pack: {0}")]
#[error("duplicate file rsync uri in fetch_cache_pp pack: {0}")]
DuplicateFileRsyncUri(String),
#[error("empty file bytes in verified pack: {0}")]
#[error("empty file bytes in fetch_cache_pp pack: {0}")]
EmptyFileBytes(String),
#[error("file hash mismatch in verified pack: {rsync_uri}")]
#[error("file hash mismatch in fetch_cache_pp pack: {rsync_uri}")]
FileHashMismatch { rsync_uri: String },
#[error("invalid time field {field}: {detail}")]

View File

@ -2,19 +2,22 @@ use crate::data_model::manifest::{ManifestDecodeError, ManifestObject, ManifestV
use crate::data_model::signed_object::SignedObjectVerifyError;
use crate::policy::{CaFailedFetchPolicy, Policy};
use crate::report::{RfcRef, Warning};
use crate::storage::{RocksStore, StorageError, VerifiedKey, VerifiedPublicationPointPack};
use crate::storage::{FetchCachePpKey, FetchCachePpPack, RocksStore, StorageError};
use crate::validation::cert_path::{CertPathError, validate_ee_cert_path};
use sha2::Digest;
use std::cmp::Ordering;
use std::collections::HashMap;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PublicationPointSource {
Fresh,
VerifiedCache,
FetchCachePp,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PublicationPointResult {
pub source: PublicationPointSource,
pub pack: VerifiedPublicationPointPack,
pub pack: FetchCachePpPack,
pub warnings: Vec<Warning>,
}
@ -38,6 +41,26 @@ pub enum ManifestFreshError {
)]
Signature(#[from] SignedObjectVerifyError),
#[error(
"manifest embedded EE certificate path validation failed: {0} (RFC 6488 §3; RFC 9286 §6.2; RFC 9286 §6.6)"
)]
EeCertPath(#[from] CertPathError),
#[error(
"manifest embedded EE certificate CRLDistributionPoints missing (cannot validate EE certificate) (RFC 6487 §4.8.6; RFC 6488 §3; RFC 9286 §6.2; RFC 9286 §6.6)"
)]
EeCrlDpMissing,
#[error(
"publication point contains no CRL files (cannot validate manifest EE certificate) (RFC 9286 §7; RFC 6487 §4.8.6; RFC 6488 §3; RFC 9286 §6.2; RFC 9286 §6.6)"
)]
NoCrlFiles,
#[error(
"CRL referenced by manifest embedded EE certificate CRLDistributionPoints not found at publication point: {0} (RFC 6487 §4.8.6; RFC 9286 §4.2.1; RFC 9286 §6.2; RFC 9286 §6.6)"
)]
EeCrlNotFound(String),
#[error(
"manifest is not valid at validation_time: this_update={this_update_rfc3339_utc} next_update={next_update_rfc3339_utc} validation_time={validation_time_rfc3339_utc} (RFC 9286 §6.3; RFC 9286 §6.6)"
)]
@ -47,6 +70,27 @@ pub enum ManifestFreshError {
validation_time_rfc3339_utc: String,
},
#[error(
"manifest must reside at the same publication point as id-ad-caRepository: manifest={manifest_rsync_uri} publication_point={publication_point_rsync_uri} (RFC 9286 §6.1; RFC 9286 §6.6)"
)]
ManifestOutsidePublicationPoint {
manifest_rsync_uri: String,
publication_point_rsync_uri: String,
},
#[error(
"manifestNumber not higher than previously validated manifest: old={old_hex} new={new_hex} (RFC 9286 §4.2.1; RFC 9286 §6.6)"
)]
ManifestNumberNotIncreasing { old_hex: String, new_hex: String },
#[error(
"thisUpdate not more recent than previously validated manifest: old={old_rfc3339_utc} new={new_rfc3339_utc} (RFC 9286 §4.2.1; RFC 9286 §6.6)"
)]
ThisUpdateNotIncreasing {
old_rfc3339_utc: String,
new_rfc3339_utc: String,
},
#[error(
"manifest referenced file missing in raw_objects: {rsync_uri} (RFC 9286 §6.4; RFC 9286 §6.6)"
)]
@ -58,14 +102,24 @@ pub enum ManifestFreshError {
#[derive(Debug, thiserror::Error)]
pub enum ManifestCachedError {
#[error("verified cache entry missing: {0} (RFC 9286 §6.6)")]
MissingVerifiedCache(String),
#[error("fetch_cache_pp entry missing: {0} (RFC 9286 §6.6)")]
MissingFetchCachePp(String),
#[error("verified cache pack invalid: {0}")]
#[error("fetch_cache_pp pack invalid: {0}")]
InvalidPack(#[from] StorageError),
#[error("cached manifest revalidation failed: {0}")]
CachedManifestFresh(#[from] ManifestFreshError),
#[error(
"cached fetch_cache_pp missing file referenced by manifest: {rsync_uri} (RFC 9286 §6.4; RFC 9286 §6.6)"
)]
CachedMissingFile { rsync_uri: String },
#[error(
"cached fetch_cache_pp file hash mismatch: {rsync_uri} (RFC 9286 §6.5; RFC 9286 §6.6)"
)]
CachedHashMismatch { rsync_uri: String },
}
#[derive(Debug, thiserror::Error)]
@ -74,7 +128,7 @@ pub enum ManifestProcessError {
StopAllOutput(#[from] ManifestFreshError),
#[error(
"manifest processing failed and no usable verified cache is available: fresh={fresh}; cached={cached}"
"manifest processing failed and no usable fetch_cache_pp is available: fresh={fresh}; cached={cached}"
)]
NoUsableCache {
fresh: ManifestFreshError,
@ -90,20 +144,24 @@ pub fn process_manifest_publication_point(
policy: &Policy,
manifest_rsync_uri: &str,
publication_point_rsync_uri: &str,
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
validation_time: time::OffsetDateTime,
) -> Result<PublicationPointResult, ManifestProcessError> {
let fresh = try_build_fresh_pack(
store,
manifest_rsync_uri,
publication_point_rsync_uri,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
);
match fresh {
Ok(pack) => {
let key = VerifiedKey::from_manifest_rsync_uri(manifest_rsync_uri);
let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_rsync_uri);
let bytes = pack.encode()?;
store.put_verified(&key, &bytes)?;
store.put_fetch_cache_pp(&key, &bytes)?;
Ok(PublicationPointResult {
source: PublicationPointSource::Fresh,
pack,
@ -114,7 +172,7 @@ pub fn process_manifest_publication_point(
CaFailedFetchPolicy::StopAllOutput => {
Err(ManifestProcessError::StopAllOutput(fresh_err))
}
CaFailedFetchPolicy::UseVerifiedCache => {
CaFailedFetchPolicy::UseFetchCachePp => {
let mut warnings = vec![
Warning::new(format!("manifest failed fetch: {fresh_err}"))
.with_rfc_refs(&[RfcRef("RFC 9286 §6.6")])
@ -125,16 +183,18 @@ pub fn process_manifest_publication_point(
store,
manifest_rsync_uri,
publication_point_rsync_uri,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
) {
Ok(pack) => {
warnings.push(
Warning::new("using verified cache for publication point")
Warning::new("using fetch_cache_pp for publication point")
.with_rfc_refs(&[RfcRef("RFC 9286 §6.6")])
.with_context(manifest_rsync_uri),
);
Ok(PublicationPointResult {
source: PublicationPointSource::VerifiedCache,
source: PublicationPointSource::FetchCachePp,
pack,
warnings,
})
@ -153,13 +213,15 @@ fn load_and_revalidate_cached_pack(
store: &RocksStore,
manifest_rsync_uri: &str,
publication_point_rsync_uri: &str,
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
validation_time: time::OffsetDateTime,
) -> Result<VerifiedPublicationPointPack, ManifestCachedError> {
let key = VerifiedKey::from_manifest_rsync_uri(manifest_rsync_uri);
) -> Result<FetchCachePpPack, ManifestCachedError> {
let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_rsync_uri);
let bytes = store
.get_verified(&key)?
.ok_or_else(|| ManifestCachedError::MissingVerifiedCache(key.as_str().to_string()))?;
let pack = VerifiedPublicationPointPack::decode(&bytes)?;
.get_fetch_cache_pp(&key)?
.ok_or_else(|| ManifestCachedError::MissingFetchCachePp(key.as_str().to_string()))?;
let pack = FetchCachePpPack::decode(&bytes)?;
if pack.manifest_rsync_uri != manifest_rsync_uri {
return Err(ManifestCachedError::InvalidPack(StorageError::RocksDb(
@ -172,15 +234,61 @@ fn load_and_revalidate_cached_pack(
)));
}
revalidate_pack_with_current_time(&pack, validation_time).map_err(ManifestCachedError::from)?;
revalidate_cached_pack_with_current_time(
&pack,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
)?;
Ok(pack)
}
fn revalidate_pack_with_current_time(
pack: &VerifiedPublicationPointPack,
fn revalidate_cached_pack_with_current_time(
pack: &FetchCachePpPack,
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
validation_time: time::OffsetDateTime,
) -> Result<(), ManifestFreshError> {
let manifest = ManifestObject::decode_der(&pack.manifest_bytes)?;
) -> Result<(), ManifestCachedError> {
// First, re-validate the cached manifest itself with the current time.
let manifest = decode_and_validate_manifest_with_current_time(&pack.manifest_bytes, validation_time)
.map_err(ManifestCachedError::from)?;
// Then, re-bind the manifest fileList to the cached pack contents, as per RFC 9286 §6.4-§6.5.
let by_uri: HashMap<&str, &crate::storage::PackFile> = pack
.files
.iter()
.map(|f| (f.rsync_uri.as_str(), f))
.collect();
for entry in &manifest.manifest.files {
let rsync_uri =
join_rsync_dir_and_file(&pack.publication_point_rsync_uri, &entry.file_name);
let Some(file) = by_uri.get(rsync_uri.as_str()) else {
return Err(ManifestCachedError::CachedMissingFile { rsync_uri });
};
if file.sha256.as_slice() != entry.hash_bytes.as_slice() {
return Err(ManifestCachedError::CachedHashMismatch { rsync_uri });
}
}
// Finally, validate the manifest's embedded EE certificate path against the issuer CA + CRL.
// This enforces cert validity + CRL validity at `validation_time` for cached packs.
validate_manifest_embedded_ee_cert_path(
&manifest,
&pack.files,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
)
.map_err(ManifestCachedError::from)?;
Ok(())
}
fn decode_and_validate_manifest_with_current_time(
manifest_bytes: &[u8],
validation_time: time::OffsetDateTime,
) -> Result<ManifestObject, ManifestFreshError> {
let manifest = ManifestObject::decode_der(manifest_bytes)?;
manifest.validate_embedded_ee_cert()?;
manifest.signed_object.verify()?;
@ -207,15 +315,24 @@ fn revalidate_pack_with_current_time(
});
}
Ok(())
Ok(manifest)
}
fn try_build_fresh_pack(
store: &RocksStore,
manifest_rsync_uri: &str,
publication_point_rsync_uri: &str,
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
validation_time: time::OffsetDateTime,
) -> Result<VerifiedPublicationPointPack, ManifestFreshError> {
) -> Result<FetchCachePpPack, ManifestFreshError> {
if !rsync_uri_is_under_publication_point(manifest_rsync_uri, publication_point_rsync_uri) {
return Err(ManifestFreshError::ManifestOutsidePublicationPoint {
manifest_rsync_uri: manifest_rsync_uri.to_string(),
publication_point_rsync_uri: publication_point_rsync_uri.to_string(),
});
}
let manifest_bytes = store
.get_raw(manifest_rsync_uri)
.map_err(|e| ManifestFreshError::MissingManifest {
@ -225,33 +342,52 @@ fn try_build_fresh_pack(
manifest_rsync_uri: manifest_rsync_uri.to_string(),
})?;
let manifest = ManifestObject::decode_der(&manifest_bytes)?;
manifest.validate_embedded_ee_cert()?;
manifest.signed_object.verify()?;
let manifest = decode_and_validate_manifest_with_current_time(&manifest_bytes, validation_time)?;
let this_update = manifest
.manifest
.this_update
.to_offset(time::UtcOffset::UTC);
let next_update = manifest
.manifest
.next_update
.to_offset(time::UtcOffset::UTC);
let this_update = manifest.manifest.this_update.to_offset(time::UtcOffset::UTC);
let next_update = manifest.manifest.next_update.to_offset(time::UtcOffset::UTC);
let now = validation_time.to_offset(time::UtcOffset::UTC);
if now < this_update || now > next_update {
return Err(ManifestFreshError::StaleOrEarly {
this_update_rfc3339_utc: this_update
.format(&time::format_description::well_known::Rfc3339)
.expect("format thisUpdate"),
next_update_rfc3339_utc: next_update
.format(&time::format_description::well_known::Rfc3339)
.expect("format nextUpdate"),
validation_time_rfc3339_utc: now
.format(&time::format_description::well_known::Rfc3339)
.expect("format validation_time"),
// RFC 9286 §4.2.1: replay/rollback detection for manifestNumber and thisUpdate.
//
// If a purported "new" manifest contains a manifestNumber equal to or lower than previously
// validated manifests, or a thisUpdate less recent than previously validated manifests,
// this is treated as a failed fetch and processing continues via the cached objects path (§6.6).
let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_rsync_uri);
if let Some(old_bytes) = store.get_fetch_cache_pp(&key).ok().flatten() {
if let Ok(old_pack) = FetchCachePpPack::decode(&old_bytes) {
if old_pack.manifest_rsync_uri == manifest_rsync_uri
&& old_pack.publication_point_rsync_uri == publication_point_rsync_uri
{
let new_num = manifest.manifest.manifest_number.bytes_be.as_slice();
let old_num = old_pack.manifest_number_be.as_slice();
if cmp_minimal_be_unsigned(new_num, old_num) != Ordering::Greater {
return Err(ManifestFreshError::ManifestNumberNotIncreasing {
old_hex: hex::encode_upper(old_num),
new_hex: hex::encode_upper(new_num),
});
}
let old_this_update = old_pack
.this_update
.parse()
.expect("pack internal validation ensures this_update parses");
if this_update <= old_this_update {
use time::format_description::well_known::Rfc3339;
return Err(ManifestFreshError::ThisUpdateNotIncreasing {
old_rfc3339_utc: old_this_update
.to_offset(time::UtcOffset::UTC)
.format(&Rfc3339)
.expect("format old thisUpdate"),
new_rfc3339_utc: this_update
.format(&Rfc3339)
.expect("format new thisUpdate"),
});
}
}
}
}
let mut files = Vec::with_capacity(manifest.manifest.files.len());
for entry in &manifest.manifest.files {
let rsync_uri = join_rsync_dir_and_file(publication_point_rsync_uri, &entry.file_name);
@ -274,10 +410,21 @@ fn try_build_fresh_pack(
));
}
Ok(VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
// RFC 6488 §3: manifest (signed object) validity includes a valid EE cert path.
// We validate this after §6.4/§6.5 so the issuer CRL can be selected from the publication point.
validate_manifest_embedded_ee_cert_path(
&manifest,
&files,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
)?;
Ok(FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: manifest_rsync_uri.to_string(),
publication_point_rsync_uri: publication_point_rsync_uri.to_string(),
manifest_number_be: manifest.manifest.manifest_number.bytes_be.clone(),
this_update: crate::storage::PackTime::from_utc_offset_datetime(this_update),
next_update: crate::storage::PackTime::from_utc_offset_datetime(next_update),
verified_at: crate::storage::PackTime::from_utc_offset_datetime(now),
@ -286,6 +433,14 @@ fn try_build_fresh_pack(
})
}
fn cmp_minimal_be_unsigned(a: &[u8], b: &[u8]) -> Ordering {
// Compare two minimal big-endian byte strings as unsigned integers.
// (Leading zeros are not expected; callers store minimal big-endian.)
a.len()
.cmp(&b.len())
.then_with(|| a.cmp(b))
}
fn join_rsync_dir_and_file(base: &str, file_name: &str) -> String {
if base.ends_with('/') {
format!("{base}{file_name}")
@ -293,3 +448,64 @@ fn join_rsync_dir_and_file(base: &str, file_name: &str) -> String {
format!("{base}/{file_name}")
}
}
fn rsync_uri_is_under_publication_point(uri: &str, publication_point_rsync_uri: &str) -> bool {
let pp = if publication_point_rsync_uri.ends_with('/') {
publication_point_rsync_uri.to_string()
} else {
format!("{publication_point_rsync_uri}/")
};
uri.starts_with(&pp)
}
fn validate_manifest_embedded_ee_cert_path(
manifest: &ManifestObject,
files: &[crate::storage::PackFile],
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
validation_time: time::OffsetDateTime,
) -> Result<(), ManifestFreshError> {
let ee = &manifest.signed_object.signed_data.certificates[0];
let ee_der = ee.raw_der.as_slice();
let crl_files = files
.iter()
.filter(|f| f.rsync_uri.ends_with(".crl"))
.collect::<Vec<_>>();
if crl_files.is_empty() {
return Err(ManifestFreshError::NoCrlFiles);
}
let Some(crldp_uris) = ee
.resource_cert
.tbs
.extensions
.crl_distribution_points_uris
.as_ref()
else {
return Err(ManifestFreshError::EeCrlDpMissing);
};
for u in crldp_uris {
let s = u.as_str();
if let Some(f) = crl_files.iter().find(|f| f.rsync_uri == s) {
let _validated = validate_ee_cert_path(
ee_der,
issuer_ca_der,
f.bytes.as_slice(),
issuer_ca_rsync_uri,
Some(f.rsync_uri.as_str()),
validation_time,
)?;
return Ok(());
}
}
Err(ManifestFreshError::EeCrlNotFound(
crldp_uris
.iter()
.map(|u| u.as_str())
.collect::<Vec<_>>()
.join(", "),
))
}

View File

@ -9,7 +9,7 @@ use crate::data_model::roa::{IpPrefix, RoaAfi, RoaDecodeError, RoaObject, RoaVal
use crate::data_model::signed_object::SignedObjectVerifyError;
use crate::policy::{Policy, SignedObjectFailurePolicy};
use crate::report::{RfcRef, Warning};
use crate::storage::{PackFile, VerifiedPublicationPointPack};
use crate::storage::{FetchCachePpPack, PackFile};
use crate::validation::cert_path::{CertPathError, validate_ee_cert_path};
const RFC_NONE: &[RfcRef] = &[];
@ -58,10 +58,10 @@ pub struct ObjectsStats {
pub publication_point_dropped: bool,
}
/// Process objects from a verified publication point pack using a known issuer CA certificate
/// Process objects from a fetch_cache_pp publication point pack using a known issuer CA certificate
/// and its effective resources (resolved via the resource-path, RFC 6487 §7.2).
pub fn process_verified_publication_point_pack_for_issuer(
pack: &VerifiedPublicationPointPack,
pub fn process_fetch_cache_pp_pack_for_issuer(
pack: &FetchCachePpPack,
policy: &Policy,
issuer_ca_der: &[u8],
issuer_ca_rsync_uri: Option<&str>,
@ -85,7 +85,7 @@ pub fn process_verified_publication_point_pack_for_issuer(
// Enforce that `manifest_bytes` is actually a manifest object.
let _manifest =
ManifestObject::decode_der(&pack.manifest_bytes).expect("verified pack manifest decodes");
ManifestObject::decode_der(&pack.manifest_bytes).expect("fetch_cache_pp manifest decodes");
let crl_files = pack
.files
@ -99,7 +99,7 @@ pub fn process_verified_publication_point_pack_for_issuer(
if crl_files.is_empty() && (stats.roa_total > 0 || stats.aspa_total > 0) {
stats.publication_point_dropped = true;
warnings.push(
Warning::new("dropping publication point: no CRL files in verified pack")
Warning::new("dropping publication point: no CRL files in fetch_cache_pp")
.with_rfc_refs(&[RfcRef("RFC 6487 §4.8.6"), RfcRef("RFC 9286 §7")])
.with_context(&pack.manifest_rsync_uri),
);
@ -110,7 +110,9 @@ pub fn process_verified_publication_point_pack_for_issuer(
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Roa,
result: AuditObjectResult::Skipped,
detail: Some("skipped due to missing CRL files in verified pack".to_string()),
detail: Some(
"skipped due to missing CRL files in fetch_cache_pp".to_string(),
),
});
} else if f.rsync_uri.ends_with(".asa") {
audit.push(ObjectAuditEntry {
@ -118,7 +120,9 @@ pub fn process_verified_publication_point_pack_for_issuer(
sha256_hex: sha256_hex_from_32(&f.sha256),
kind: AuditObjectKind::Aspa,
result: AuditObjectResult::Skipped,
detail: Some("skipped due to missing CRL files in verified pack".to_string()),
detail: Some(
"skipped due to missing CRL files in fetch_cache_pp".to_string(),
),
});
}
}
@ -357,12 +361,12 @@ enum ObjectValidateError {
MissingCrlDpUris,
#[error(
"no CRL available in verified pack (cannot validate certificates) (RFC 9286 §7; RFC 6487 §4.8.6)"
"no CRL available in fetch_cache_pp (cannot validate certificates) (RFC 9286 §7; RFC 6487 §4.8.6)"
)]
MissingCrlInPack,
#[error(
"CRL referenced by CRLDistributionPoints not found in verified pack: {0} (RFC 6487 §4.8.6; RFC 9286 §4.2.1)"
"CRL referenced by CRLDistributionPoints not found in fetch_cache_pp: {0} (RFC 6487 §4.8.6; RFC 9286 §4.2.1)"
)]
CrlNotFound(String),

View File

@ -1,12 +1,12 @@
use crate::data_model::rc::{AsResourceSet, IpResourceSet};
use crate::fetch::rsync::RsyncFetcher;
use crate::policy::Policy;
use crate::storage::{RocksStore, VerifiedKey};
use crate::storage::{FetchCachePpKey, RocksStore};
use crate::sync::repo::{RepoSyncResult, sync_publication_point};
use crate::sync::rrdp::Fetcher as HttpFetcher;
use crate::validation::manifest::{PublicationPointResult, process_manifest_publication_point};
use crate::validation::objects::{
ObjectsOutput, process_verified_publication_point_pack_for_issuer,
ObjectsOutput, process_fetch_cache_pp_pack_for_issuer,
};
#[derive(Clone, Debug, PartialEq, Eq)]
@ -29,8 +29,8 @@ pub enum RunError {
///
/// This orchestrates:
/// 1) repo sync (RRDP or rsync fallback) into `raw_objects`
/// 2) manifest RP processing into a verified pack (`verified:<manifest-rsync-uri>`)
/// 3) signed object processing (ROA/ASPA) from the verified pack
/// 2) manifest RP processing into a fetch_cache_pp pack (`fetch_cache_pp:<manifest-rsync-uri>`)
/// 3) signed object processing (ROA/ASPA) from the fetch_cache_pp pack
pub fn run_publication_point_once(
store: &RocksStore,
policy: &Policy,
@ -60,10 +60,12 @@ pub fn run_publication_point_once(
policy,
manifest_rsync_uri,
publication_point_rsync_uri,
issuer_ca_der,
issuer_ca_rsync_uri,
validation_time,
)?;
let objects = process_verified_publication_point_pack_for_issuer(
let objects = process_fetch_cache_pp_pack_for_issuer(
&publication_point.pack,
policy,
issuer_ca_der,
@ -80,10 +82,10 @@ pub fn run_publication_point_once(
})
}
pub fn verified_pack_exists(store: &RocksStore, manifest_rsync_uri: &str) -> Result<bool, String> {
let key = VerifiedKey::from_manifest_rsync_uri(manifest_rsync_uri);
pub fn fetch_cache_pp_exists(store: &RocksStore, manifest_rsync_uri: &str) -> Result<bool, String> {
let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_rsync_uri);
store
.get_verified(&key)
.get_fetch_cache_pp(&key)
.map(|v| v.is_some())
.map_err(|e| e.to_string())
}

View File

@ -2,7 +2,7 @@ use crate::audit::DiscoveredFrom;
use crate::audit::PublicationPointAudit;
use crate::data_model::rc::{AsResourceSet, IpResourceSet};
use crate::report::{RfcRef, Warning};
use crate::storage::VerifiedPublicationPointPack;
use crate::storage::FetchCachePpPack;
use crate::validation::manifest::PublicationPointSource;
use crate::validation::objects::{AspaAttestation, ObjectsOutput, Vrp};
@ -54,14 +54,14 @@ impl CaInstanceHandle {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PublicationPointRunResult {
pub source: PublicationPointSource,
pub pack: VerifiedPublicationPointPack,
pub pack: FetchCachePpPack,
pub warnings: Vec<Warning>,
pub objects: ObjectsOutput,
pub audit: PublicationPointAudit,
/// Candidate child CA instances discovered from this publication point.
///
/// RFC 9286 §6.6 restriction is enforced by the tree engine: if this
/// publication point used verified cache due to failed fetch, children MUST NOT
/// publication point used fetch_cache_pp due to failed fetch, children MUST NOT
/// be enqueued/processed in this run.
pub discovered_children: Vec<DiscoveredChildCaInstance>,
}

View File

@ -11,7 +11,7 @@ use crate::sync::rrdp::Fetcher;
use crate::validation::ca_instance::ca_instance_uris_from_ca_certificate;
use crate::validation::ca_path::{CaPathError, validate_subordinate_ca_cert};
use crate::validation::manifest::{PublicationPointSource, process_manifest_publication_point};
use crate::validation::objects::process_verified_publication_point_pack_for_issuer;
use crate::validation::objects::process_fetch_cache_pp_pack_for_issuer;
use crate::validation::tree::{
CaInstanceHandle, DiscoveredChildCaInstance, PublicationPointRunResult, PublicationPointRunner,
};
@ -53,6 +53,8 @@ impl<'a> PublicationPointRunner for Rpkiv1PublicationPointRunner<'a> {
self.policy,
&ca.manifest_rsync_uri,
&ca.publication_point_rsync_uri,
&ca.ca_certificate_der,
ca.ca_certificate_rsync_uri.as_deref(),
self.validation_time,
) {
Ok(v) => v,
@ -61,7 +63,7 @@ impl<'a> PublicationPointRunner for Rpkiv1PublicationPointRunner<'a> {
warnings.extend(pp.warnings.clone());
let objects = process_verified_publication_point_pack_for_issuer(
let objects = process_fetch_cache_pp_pack_for_issuer(
&pp.pack,
self.policy,
&ca.ca_certificate_der,
@ -107,7 +109,7 @@ struct ChildDiscoveryOutput {
fn discover_children_from_fresh_pack_with_audit(
issuer: &CaInstanceHandle,
pack: &crate::storage::VerifiedPublicationPointPack,
pack: &crate::storage::FetchCachePpPack,
validation_time: time::OffsetDateTime,
) -> Result<ChildDiscoveryOutput, String> {
let issuer_ca_der = issuer.ca_certificate_der.as_slice();
@ -219,7 +221,7 @@ fn discover_children_from_fresh_pack_with_audit(
fn select_issuer_crl_from_pack<'a>(
child_cert_der: &[u8],
pack: &'a crate::storage::VerifiedPublicationPointPack,
pack: &'a crate::storage::FetchCachePpPack,
) -> Result<(&'a str, &'a [u8]), String> {
let child = crate::data_model::rc::ResourceCertificate::decode_der(child_cert_der)
.map_err(|e| format!("child certificate decode failed: {e}"))?;
@ -237,7 +239,7 @@ fn select_issuer_crl_from_pack<'a>(
}
Err(format!(
"CRL referenced by child certificate CRLDistributionPoints not found in verified pack: {} (RFC 6487 §4.8.6; RFC 9286 §4.2.1)",
"CRL referenced by child certificate CRLDistributionPoints not found in fetch_cache_pp: {} (RFC 6487 §4.8.6; RFC 9286 §4.2.1)",
crldp_uris
.iter()
.map(|u| u.as_str())
@ -359,7 +361,7 @@ fn build_publication_point_audit(
rrdp_notification_uri: ca.rrdp_notification_uri.clone(),
source: match pp.source {
PublicationPointSource::Fresh => "fresh".to_string(),
PublicationPointSource::VerifiedCache => "verified_cache".to_string(),
PublicationPointSource::FetchCachePp => "fetch_cache_pp".to_string(),
},
this_update_rfc3339_utc: pp.pack.this_update.rfc3339_utc.clone(),
next_update_rfc3339_utc: pp.pack.next_update.rfc3339_utc.clone(),
@ -374,7 +376,7 @@ mod tests {
use super::*;
use crate::data_model::rc::ResourceCertificate;
use crate::fetch::rsync::LocalDirRsyncFetcher;
use crate::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use crate::storage::{FetchCachePpPack, PackFile, PackTime};
use crate::sync::rrdp::Fetcher;
use crate::validation::tree::PublicationPointRunner;
@ -573,12 +575,13 @@ authorityKeyIdentifier = keyid:always
}
}
fn dummy_pack_with_files(files: Vec<PackFile>) -> VerifiedPublicationPointPack {
fn dummy_pack_with_files(files: Vec<PackFile>) -> FetchCachePpPack {
let now = time::OffsetDateTime::now_utc();
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: "rsync://example.test/repo/issuer/issuer.mft".to_string(),
publication_point_rsync_uri: "rsync://example.test/repo/issuer/".to_string(),
manifest_number_be: vec![1],
this_update: PackTime::from_utc_offset_datetime(now),
next_update: PackTime::from_utc_offset_datetime(now + time::Duration::hours(1)),
verified_at: PackTime::from_utc_offset_datetime(now),

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,8 @@
small-01 3160 1875 stored_point_container bench/full/cache/stored/rrdp/rrdp.twnic.tw/9e5ef8f1eb47c1bd46d2c578ebefa6a774fe140dbfbfeff1a018f1ebfc1de14b/rsync/rpkica.twnic.tw/rpki/TWNICCA/ATT/VRjTaiHoQb_taL2Agyl1Nd1wiwU.mft
small-02 3160 1875 stored_point_container bench/full/cache/stored/rrdp/rrdp.twnic.tw/9e5ef8f1eb47c1bd46d2c578ebefa6a774fe140dbfbfeff1a018f1ebfc1de14b/rsync/rpkica.twnic.tw/rpki/TWNICCA/BOT/Kn9vIkC0LSu8Df78l2yrIJHEfsc.mft
medium-01 16384 2681 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/69fd0156-bb1f-48b6-bf32-c9492286f195/a32aa006-ef22-4905-8c82-2651c19859b9/a32aa006-ef22-4905-8c82-2651c19859b9.mft
medium-02 16384 2681 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/6ead073c-b9f0-4523-b39e-a3c7dab97c25/dfc6770a-8775-4ba8-9d91-68facba79b33/dfc6770a-8775-4ba8-9d91-68facba79b33.mft
large-01 262869 2250 stored_point_container bench/full/cache/stored/rrdp/rpki-rrdp.us-east-2.amazonaws.com/c7cf30a5673053f1484f06ac72410809310e150ad73a8a7471cc95aad83b1e07/rsync/rpki-rsync.us-east-2.amazonaws.com/volume/16f1ffee-7461-4674-bb05-fddefa9a02c6/JmLOFOkF4Y68t1IvkrNoS8SGW00.mft
large-02 264331 10476 stored_point_container bench/full/cache/stored/rrdp/rrdp.ripe.net/2315d99a99627f34bc597569abc7c177ad45108a37f173f3d06dbabd64962f3c/rsync/rpki.ripe.net/repository/DEFAULT/fa/bc92e6-c8ee-48f0-ae7f-36ccb5a06195/1/tDgLm4wHBFftVLxF0S3d0kTgbVI.mft
xlarge-01 4522145 144968 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/fde169ed-d0d2-4165-8308-df2597e343f8/c2070b9e-789d-4f95-bca5-30e065e9a31d/c2070b9e-789d-4f95-bca5-30e065e9a31d.mft
xlarge-02 4615094 150261 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/f60c9f32-a87c-4339-a2f3-6299a3b02e29/b2f0a061-78dd-4d61-988a-266b192d9caa/b2f0a061-78dd-4d61-988a-266b192d9caa.mft
1 small-01 3160 1875 stored_point_container bench/full/cache/stored/rrdp/rrdp.twnic.tw/9e5ef8f1eb47c1bd46d2c578ebefa6a774fe140dbfbfeff1a018f1ebfc1de14b/rsync/rpkica.twnic.tw/rpki/TWNICCA/ATT/VRjTaiHoQb_taL2Agyl1Nd1wiwU.mft
2 small-02 3160 1875 stored_point_container bench/full/cache/stored/rrdp/rrdp.twnic.tw/9e5ef8f1eb47c1bd46d2c578ebefa6a774fe140dbfbfeff1a018f1ebfc1de14b/rsync/rpkica.twnic.tw/rpki/TWNICCA/BOT/Kn9vIkC0LSu8Df78l2yrIJHEfsc.mft
3 medium-01 16384 2681 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/69fd0156-bb1f-48b6-bf32-c9492286f195/a32aa006-ef22-4905-8c82-2651c19859b9/a32aa006-ef22-4905-8c82-2651c19859b9.mft
4 medium-02 16384 2681 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/6ead073c-b9f0-4523-b39e-a3c7dab97c25/dfc6770a-8775-4ba8-9d91-68facba79b33/dfc6770a-8775-4ba8-9d91-68facba79b33.mft
5 large-01 262869 2250 stored_point_container bench/full/cache/stored/rrdp/rpki-rrdp.us-east-2.amazonaws.com/c7cf30a5673053f1484f06ac72410809310e150ad73a8a7471cc95aad83b1e07/rsync/rpki-rsync.us-east-2.amazonaws.com/volume/16f1ffee-7461-4674-bb05-fddefa9a02c6/JmLOFOkF4Y68t1IvkrNoS8SGW00.mft
6 large-02 264331 10476 stored_point_container bench/full/cache/stored/rrdp/rrdp.ripe.net/2315d99a99627f34bc597569abc7c177ad45108a37f173f3d06dbabd64962f3c/rsync/rpki.ripe.net/repository/DEFAULT/fa/bc92e6-c8ee-48f0-ae7f-36ccb5a06195/1/tDgLm4wHBFftVLxF0S3d0kTgbVI.mft
7 xlarge-01 4522145 144968 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/fde169ed-d0d2-4165-8308-df2597e343f8/c2070b9e-789d-4f95-bca5-30e065e9a31d/c2070b9e-789d-4f95-bca5-30e065e9a31d.mft
8 xlarge-02 4615094 150261 stored_point_container bench/full/cache/stored/rrdp/rrdp.arin.net/e2c8cb4b372d841061fc231dcdc28a679c244f4f630d41cfe01c8c3aaa3a36f7/rsync/rpki.arin.net/repository/arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/f60c9f32-a87c-4339-a2f3-6299a3b02e29/b2f0a061-78dd-4d61-988a-266b192d9caa/b2f0a061-78dd-4d61-988a-266b192d9caa.mft

Binary file not shown.

Binary file not shown.

View File

@ -63,7 +63,7 @@ impl LiveStats {
rpki::validation::manifest::PublicationPointSource::Fresh => {
self.publication_points_fresh += 1
}
rpki::validation::manifest::PublicationPointSource::VerifiedCache => {
rpki::validation::manifest::PublicationPointSource::FetchCachePp => {
self.publication_points_cached += 1
}
}
@ -205,9 +205,9 @@ fn apnic_tree_full_stats_serial() {
}
}
let verified_total = store
.verified_iter_all()
.expect("verified_iter_all")
let fetch_cache_pp_total = store
.fetch_cache_pp_iter_all()
.expect("fetch_cache_pp_iter_all")
.count();
println!("APNIC Stage2 full-tree serial stats");
@ -238,6 +238,7 @@ fn apnic_tree_full_stats_serial() {
stats.pack_file_uris_unique.len()
);
println!("pack_uris_by_ext_total={:?}", stats.pack_uris_by_ext_total);
println!("fetch_cache_pp_total={fetch_cache_pp_total}");
println!();
println!(
"crl_total={} crl_decode_ok={}",
@ -264,7 +265,7 @@ fn apnic_tree_full_stats_serial() {
"rocksdb_raw_objects_total={} raw_by_ext={:?}",
raw_total, raw_by_ext
);
println!("rocksdb_verified_packs_total={}", verified_total);
println!("rocksdb_fetch_cache_pp_total={fetch_cache_pp_total}");
// Loose sanity assertions (avoid flakiness due to repository churn).
//

View File

@ -1,8 +1,8 @@
use rpki::audit::PublicationPointAudit;
use rpki::policy::{Policy, SignedObjectFailurePolicy};
use rpki::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackFile, PackTime};
use rpki::validation::manifest::PublicationPointSource;
use rpki::validation::objects::process_verified_publication_point_pack_for_issuer;
use rpki::validation::objects::process_fetch_cache_pp_pack_for_issuer;
use rpki::validation::tree::{
CaInstanceHandle, PublicationPointRunResult, PublicationPointRunner, TreeRunConfig,
run_tree_serial_audit,
@ -13,14 +13,15 @@ fn fixture_bytes(path: &str) -> Vec<u8> {
.unwrap_or_else(|e| panic!("read fixture {path}: {e}"))
}
fn dummy_pack(files: Vec<PackFile>) -> VerifiedPublicationPointPack {
fn dummy_pack(files: Vec<PackFile>) -> FetchCachePpPack {
let now = time::OffsetDateTime::now_utc();
let manifest_rsync_uri =
"rsync://rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft";
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: manifest_rsync_uri.to_string(),
publication_point_rsync_uri: "rsync://rpki.cernet.net/repo/cernet/0/".to_string(),
manifest_number_be: vec![1],
this_update: PackTime::from_utc_offset_datetime(now),
next_update: PackTime::from_utc_offset_datetime(now + time::Duration::hours(1)),
verified_at: PackTime::from_utc_offset_datetime(now),
@ -33,7 +34,7 @@ fn dummy_pack(files: Vec<PackFile>) -> VerifiedPublicationPointPack {
struct SinglePackRunner {
policy: Policy,
pack: VerifiedPublicationPointPack,
pack: FetchCachePpPack,
}
impl PublicationPointRunner for SinglePackRunner {
@ -41,7 +42,7 @@ impl PublicationPointRunner for SinglePackRunner {
&self,
ca: &CaInstanceHandle,
) -> Result<PublicationPointRunResult, String> {
let objects = process_verified_publication_point_pack_for_issuer(
let objects = process_fetch_cache_pp_pack_for_issuer(
&self.pack,
&self.policy,
&ca.ca_certificate_der,

View File

@ -0,0 +1,208 @@
use std::path::Path;
use rpki::data_model::manifest::ManifestObject;
use rpki::policy::{CaFailedFetchPolicy, Policy};
use rpki::storage::{FetchCachePpKey, FetchCachePpPack, RocksStore};
use rpki::validation::manifest::process_manifest_publication_point;
/// Loads the DER bytes of the APNIC issuer CA certificate fixture from disk.
fn issuer_ca_fixture() -> Vec<u8> {
    let path = "tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer";
    std::fs::read(path).expect("read issuer ca fixture")
}
/// rsync URI at which the issuer CA certificate fixture is published.
fn issuer_ca_rsync_uri() -> &'static str {
    const URI: &str = "rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer";
    URI
}
/// Maps a fixture path of the form `tests/fixtures/repository/<host>/<rest...>`
/// to the corresponding `rsync://<host>/<rest...>` URI.
///
/// Panics if `path` is not rooted under `tests/fixtures/repository` or has no
/// host component — fixtures are expected to follow that layout.
fn fixture_to_rsync_uri(path: &Path) -> String {
    let rel = path
        .strip_prefix("tests/fixtures/repository")
        .expect("path under tests/fixtures/repository");
    let mut components = rel.components();
    let host_component = components.next().expect("host component");
    let mut uri = String::from("rsync://");
    uri.push_str(&host_component.as_os_str().to_string_lossy());
    uri.push('/');
    uri.push_str(&components.as_path().to_string_lossy());
    uri
}
/// Like [`fixture_to_rsync_uri`], but guarantees a trailing slash so the result
/// can be used as a publication-point (directory) URI prefix.
fn fixture_dir_to_rsync_uri(dir: &Path) -> String {
    let uri = fixture_to_rsync_uri(dir);
    if uri.ends_with('/') {
        uri
    } else {
        format!("{uri}/")
    }
}
/// Reads and decodes the CERNET manifest fixture.
///
/// Returns the fixture's on-disk path, its raw DER bytes, and the decoded
/// manifest object, so callers can both seed the store and inspect fields.
fn load_cernet_manifest_fixture() -> (std::path::PathBuf, Vec<u8>, ManifestObject) {
    let manifest_path = std::path::PathBuf::from(
        "tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft",
    );
    let manifest_bytes = std::fs::read(&manifest_path).expect("read manifest fixture");
    let manifest = ManifestObject::decode_der(&manifest_bytes).expect("decode manifest fixture");
    (manifest_path, manifest_bytes, manifest)
}
/// Seeds the raw-object store with the manifest bytes and every file the
/// manifest lists, mirroring what a successful fetch of the publication point
/// would have stored.
///
/// Each listed file is read from the fixture directory next to the manifest
/// and stored under `<publication_point_rsync_uri><file_name>`.
fn store_raw_publication_point_files(
    store: &RocksStore,
    manifest_path: &Path,
    manifest_rsync_uri: &str,
    manifest_bytes: &[u8],
    manifest: &ManifestObject,
    publication_point_rsync_uri: &str,
) {
    store
        .put_raw(manifest_rsync_uri, manifest_bytes)
        .expect("store manifest raw");
    let fixture_dir = manifest_path.parent().unwrap();
    for entry in &manifest.manifest.files {
        let file_path = fixture_dir.join(&entry.file_name);
        let contents = std::fs::read(&file_path)
            .unwrap_or_else(|_| panic!("read fixture file referenced by manifest: {file_path:?}"));
        let rsync_uri = format!("{publication_point_rsync_uri}{}", entry.file_name);
        store.put_raw(&rsync_uri, &contents).expect("store file raw");
    }
}
// Revalidation of a cached pack must enforce manifest-fileList completeness:
// a pack that silently lost a listed file is rejected (RFC 9286 §6.4).
#[test]
fn cached_pack_revalidation_rejects_missing_file_referenced_by_manifest() {
    let (manifest_path, manifest_bytes, manifest) = load_cernet_manifest_fixture();
    // Validation time just inside the manifest's validity window.
    let validation_time = manifest.manifest.this_update + time::Duration::seconds(1);
    let manifest_rsync_uri = fixture_to_rsync_uri(&manifest_path);
    let publication_point_rsync_uri = fixture_dir_to_rsync_uri(manifest_path.parent().unwrap());
    let temp = tempfile::tempdir().expect("tempdir");
    let store = RocksStore::open(temp.path()).expect("open rocksdb");
    // Seed the store with the full, valid publication point.
    store_raw_publication_point_files(
        &store,
        &manifest_path,
        &manifest_rsync_uri,
        &manifest_bytes,
        &manifest,
        &publication_point_rsync_uri,
    );
    let mut policy = Policy::default();
    policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseFetchCachePp;
    let issuer_ca_der = issuer_ca_fixture();
    // Fresh run succeeds and writes a fetch_cache_pp pack for this manifest.
    let _fresh = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        validation_time,
    )
    .expect("fresh run stores fetch_cache_pp");
    // Fetch the cached pack so we can corrupt it in a targeted way.
    let key = FetchCachePpKey::from_manifest_rsync_uri(&manifest_rsync_uri);
    let cached_bytes = store
        .get_fetch_cache_pp(&key)
        .expect("get fetch_cache_pp")
        .expect("fetch_cache_pp exists");
    let mut pack = FetchCachePpPack::decode(&cached_bytes).expect("decode pack");
    // Remove one file from the pack: pack stays internally consistent, but no longer satisfies
    // RFC 9286 §6.4 when revalidated against the manifest fileList.
    pack.files.pop().expect("non-empty pack");
    let bytes = pack.encode().expect("encode pack");
    store
        .put_fetch_cache_pp(&key, &bytes)
        .expect("overwrite fetch_cache_pp");
    // Force cache path: remove raw manifest so fresh processing fails at §6.2.
    store
        .delete_raw(&manifest_rsync_uri)
        .expect("delete raw manifest");
    // Second run must fall back to the cache, revalidate it, and reject it.
    let err = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        validation_time,
    )
    .expect_err("cache pack missing file must be rejected");
    let msg = err.to_string();
    assert!(msg.contains("cached fetch_cache_pp missing file"), "{msg}");
    assert!(msg.contains("RFC 9286 §6.4"), "{msg}");
}
// Revalidation of a cached pack must recheck file hashes against the manifest
// fileList: a mutated file (even with a self-consistent pack) is rejected
// (RFC 9286 §6.5).
#[test]
fn cached_pack_revalidation_rejects_hash_mismatch_against_manifest_filelist() {
    let (manifest_path, manifest_bytes, manifest) = load_cernet_manifest_fixture();
    // Validation time just inside the manifest's validity window.
    let validation_time = manifest.manifest.this_update + time::Duration::seconds(1);
    let manifest_rsync_uri = fixture_to_rsync_uri(&manifest_path);
    let publication_point_rsync_uri = fixture_dir_to_rsync_uri(manifest_path.parent().unwrap());
    let temp = tempfile::tempdir().expect("tempdir");
    let store = RocksStore::open(temp.path()).expect("open rocksdb");
    // Seed the store with the full, valid publication point.
    store_raw_publication_point_files(
        &store,
        &manifest_path,
        &manifest_rsync_uri,
        &manifest_bytes,
        &manifest,
        &publication_point_rsync_uri,
    );
    let mut policy = Policy::default();
    policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseFetchCachePp;
    let issuer_ca_der = issuer_ca_fixture();
    // Fresh run succeeds and writes a fetch_cache_pp pack for this manifest.
    let _fresh = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        validation_time,
    )
    .expect("fresh run stores fetch_cache_pp");
    // Fetch the cached pack so we can corrupt one of its files.
    let key = FetchCachePpKey::from_manifest_rsync_uri(&manifest_rsync_uri);
    let cached_bytes = store
        .get_fetch_cache_pp(&key)
        .expect("get fetch_cache_pp")
        .expect("fetch_cache_pp exists");
    let mut pack = FetchCachePpPack::decode(&cached_bytes).expect("decode pack");
    // Mutate one file but keep pack internally consistent by recomputing its sha256 field.
    let victim = pack.files.first_mut().expect("non-empty pack");
    victim.bytes[0] ^= 0xFF;
    victim.sha256 = victim.compute_sha256();
    let bytes = pack.encode().expect("encode pack");
    store
        .put_fetch_cache_pp(&key, &bytes)
        .expect("overwrite fetch_cache_pp");
    // Force cache path.
    store
        .delete_raw(&manifest_rsync_uri)
        .expect("delete raw manifest");
    // Second run must fall back to the cache, revalidate it, and reject it.
    let err = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        validation_time,
    )
    .expect_err("cache pack hash mismatch must be rejected");
    let msg = err.to_string();
    assert!(msg.contains("cached fetch_cache_pp file hash mismatch"), "{msg}");
    assert!(msg.contains("RFC 9286 §6.5"), "{msg}");
}

View File

@ -2,9 +2,20 @@ use std::path::Path;
use rpki::data_model::manifest::ManifestObject;
use rpki::policy::{CaFailedFetchPolicy, Policy};
use rpki::storage::{RocksStore, VerifiedKey, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpKey, FetchCachePpPack, RocksStore};
use rpki::validation::manifest::process_manifest_publication_point;
fn issuer_ca_fixture() -> Vec<u8> {
std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer ca fixture")
}
fn issuer_ca_rsync_uri() -> &'static str {
"rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer"
}
fn fixture_to_rsync_uri(path: &Path) -> String {
let rel = path
.strip_prefix("tests/fixtures/repository")
@ -33,18 +44,21 @@ fn cache_is_not_used_when_missing_and_fresh_manifest_is_missing() {
let store = RocksStore::open(temp.path()).expect("open rocksdb");
let mut policy = Policy::default();
policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseVerifiedCache;
policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseFetchCachePp;
let issuer_ca_der = issuer_ca_fixture();
let err = process_manifest_publication_point(
&store,
&policy,
"rsync://example.net/repo/manifest.mft",
"rsync://example.net/repo/",
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
time::OffsetDateTime::from_unix_timestamp(0).unwrap(),
)
.expect_err("no raw and no verified cache should fail");
.expect_err("no raw and no fetch_cache_pp should fail");
assert!(err.to_string().contains("verified cache entry missing"));
assert!(err.to_string().contains("fetch_cache_pp entry missing"));
}
#[test]
@ -75,27 +89,30 @@ fn cache_pack_publication_point_mismatch_is_rejected() {
}
let policy = Policy::default();
let issuer_ca_der = issuer_ca_fixture();
let _ = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("first run stores verified pack");
.expect("first run stores fetch_cache_pp pack");
// Corrupt the cached pack by changing the publication point.
let key = VerifiedKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let key = FetchCachePpKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let bytes = store
.get_verified(&key)
.expect("get verified")
.expect("verified exists");
let mut pack = VerifiedPublicationPointPack::decode(&bytes).expect("decode pack");
.get_fetch_cache_pp(&key)
.expect("get fetch_cache_pp")
.expect("fetch_cache_pp exists");
let mut pack = FetchCachePpPack::decode(&bytes).expect("decode pack");
pack.publication_point_rsync_uri = "rsync://evil.invalid/repo/".to_string();
let bytes = pack.encode().expect("re-encode pack");
store
.put_verified(&key, &bytes)
.expect("overwrite verified");
.put_fetch_cache_pp(&key, &bytes)
.expect("overwrite fetch_cache_pp");
// Remove raw manifest to force cache path.
store
@ -107,6 +124,8 @@ fn cache_pack_publication_point_mismatch_is_rejected() {
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect_err("cache pack mismatch should fail");

View File

@ -2,9 +2,20 @@ use std::path::Path;
use rpki::data_model::manifest::ManifestObject;
use rpki::policy::{CaFailedFetchPolicy, Policy};
use rpki::storage::{RocksStore, VerifiedKey, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpKey, FetchCachePpPack, RocksStore};
use rpki::validation::manifest::{PublicationPointSource, process_manifest_publication_point};
fn issuer_ca_fixture() -> Vec<u8> {
std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer ca fixture")
}
fn issuer_ca_rsync_uri() -> &'static str {
"rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer"
}
fn fixture_to_rsync_uri(path: &Path) -> String {
let rel = path
.strip_prefix("tests/fixtures/repository")
@ -28,7 +39,7 @@ fn fixture_dir_to_rsync_uri(dir: &Path) -> String {
}
#[test]
fn manifest_success_writes_verified_pack() {
fn manifest_success_writes_fetch_cache_pp_pack() {
let manifest_path = Path::new(
"tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft",
);
@ -55,23 +66,26 @@ fn manifest_success_writes_verified_pack() {
}
let policy = Policy::default();
let issuer_ca_der = issuer_ca_fixture();
let out = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("process manifest publication point");
assert_eq!(out.source, PublicationPointSource::Fresh);
assert!(out.warnings.is_empty());
let key = VerifiedKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let key = FetchCachePpKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let stored = store
.get_verified(&key)
.expect("get verified")
.expect("verified pack exists");
let decoded = VerifiedPublicationPointPack::decode(&stored).expect("decode stored pack");
.get_fetch_cache_pp(&key)
.expect("get fetch_cache_pp")
.expect("fetch_cache_pp pack exists");
let decoded = FetchCachePpPack::decode(&stored).expect("decode stored pack");
assert_eq!(decoded.manifest_rsync_uri, manifest_rsync_uri);
assert_eq!(
decoded.publication_point_rsync_uri,
@ -80,7 +94,7 @@ fn manifest_success_writes_verified_pack() {
}
#[test]
fn manifest_hash_mismatch_falls_back_to_verified_cache_when_enabled() {
fn manifest_hash_mismatch_falls_back_to_fetch_cache_pp_when_enabled() {
let manifest_path = Path::new(
"tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft",
);
@ -106,22 +120,25 @@ fn manifest_hash_mismatch_falls_back_to_verified_cache_when_enabled() {
}
let policy = Policy::default();
let issuer_ca_der = issuer_ca_fixture();
let first = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("first run stores verified pack");
.expect("first run stores fetch_cache_pp pack");
assert_eq!(first.source, PublicationPointSource::Fresh);
let key = VerifiedKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let key = FetchCachePpKey::from_manifest_rsync_uri(&manifest_rsync_uri);
let cached_bytes = store
.get_verified(&key)
.expect("get verified")
.expect("verified pack exists");
let cached_pack = VerifiedPublicationPointPack::decode(&cached_bytes).expect("decode cached");
.get_fetch_cache_pp(&key)
.expect("get fetch_cache_pp")
.expect("fetch_cache_pp pack exists");
let cached_pack = FetchCachePpPack::decode(&cached_bytes).expect("decode cached");
let victim = manifest
.manifest
@ -141,10 +158,12 @@ fn manifest_hash_mismatch_falls_back_to_verified_cache_when_enabled() {
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("second run falls back to verified cache");
assert_eq!(second.source, PublicationPointSource::VerifiedCache);
.expect("second run falls back to fetch_cache_pp");
assert_eq!(second.source, PublicationPointSource::FetchCachePp);
assert!(!second.warnings.is_empty());
assert_eq!(second.pack, cached_pack);
}
@ -176,15 +195,18 @@ fn manifest_failed_fetch_stop_all_output() {
}
let mut policy = Policy::default();
policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseVerifiedCache;
policy.ca_failed_fetch_policy = CaFailedFetchPolicy::UseFetchCachePp;
let issuer_ca_der = issuer_ca_fixture();
let _ = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("first run stores verified pack");
.expect("first run stores fetch_cache_pp pack");
let victim = manifest
.manifest
@ -205,9 +227,11 @@ fn manifest_failed_fetch_stop_all_output() {
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect_err("stop_all_output should not use verified cache");
.expect_err("stop_all_output should not use fetch_cache_pp");
let msg = err.to_string();
assert!(msg.contains("cache use is disabled"));
}
@ -240,14 +264,17 @@ fn manifest_fallback_pack_is_revalidated_and_rejected_if_stale() {
}
let policy = Policy::default();
let issuer_ca_der = issuer_ca_fixture();
let _ = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
ok_time,
)
.expect("first run stores verified pack");
.expect("first run stores fetch_cache_pp pack");
store
.delete_raw(&manifest_rsync_uri)
@ -258,9 +285,75 @@ fn manifest_fallback_pack_is_revalidated_and_rejected_if_stale() {
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some(issuer_ca_rsync_uri()),
stale_time,
)
.expect_err("stale validation_time must reject verified cache pack");
.expect_err("stale validation_time must reject fetch_cache_pp pack");
let msg = err.to_string();
assert!(msg.contains("not valid at validation_time"));
}
// Seeing the same manifest again (manifestNumber not strictly higher than the
// cached one) is treated like a failed fetch: the cached pack is served with a
// monotonicity warning rather than accepted as fresh.
#[test]
fn manifest_replay_is_treated_as_failed_fetch_and_uses_fetch_cache_pp() {
    let manifest_path = Path::new(
        "tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft",
    );
    let manifest_bytes = std::fs::read(manifest_path).expect("read manifest fixture");
    let manifest = ManifestObject::decode_der(&manifest_bytes).expect("decode manifest fixture");
    // Two distinct validation times inside the validity window, so the second
    // run is a genuine re-processing of the unchanged manifest.
    let t1 = manifest.manifest.this_update + time::Duration::seconds(1);
    let t2 = manifest.manifest.this_update + time::Duration::seconds(2);
    let manifest_rsync_uri = fixture_to_rsync_uri(manifest_path);
    let publication_point_rsync_uri = fixture_dir_to_rsync_uri(manifest_path.parent().unwrap());
    let temp = tempfile::tempdir().expect("tempdir");
    let store = RocksStore::open(temp.path()).expect("open rocksdb");
    // Seed the store with the manifest and every file it lists.
    store
        .put_raw(&manifest_rsync_uri, &manifest_bytes)
        .expect("store manifest");
    for entry in &manifest.manifest.files {
        let file_path = manifest_path.parent().unwrap().join(&entry.file_name);
        let bytes = std::fs::read(&file_path)
            .unwrap_or_else(|_| panic!("read fixture file referenced by manifest: {file_path:?}"));
        let rsync_uri = format!("{publication_point_rsync_uri}{}", entry.file_name);
        store.put_raw(&rsync_uri, &bytes).expect("store file");
    }
    let policy = Policy::default();
    let issuer_ca_der = issuer_ca_fixture();
    // First run processes the point fresh and caches the resulting pack.
    let first = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        t1,
    )
    .expect("first run builds and stores fetch_cache_pp pack");
    assert_eq!(first.source, PublicationPointSource::Fresh);
    // Second run sees the identical manifest again (a replay) and must serve
    // the cached pack instead of treating the replay as a fresh success.
    let second = process_manifest_publication_point(
        &store,
        &policy,
        &manifest_rsync_uri,
        &publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        t2,
    )
    .expect("second run should treat replay as failed fetch and use cache");
    assert_eq!(second.source, PublicationPointSource::FetchCachePp);
    assert_eq!(second.pack, first.pack);
    assert!(
        second
            .warnings
            .iter()
            .any(|w| w.message.contains("manifestNumber not higher")),
        "expected warning mentioning manifestNumber monotonicity"
    );
}

View File

@ -0,0 +1,77 @@
use std::path::Path;
use rpki::data_model::manifest::ManifestObject;
use rpki::policy::{CaFailedFetchPolicy, Policy};
use rpki::storage::{FetchCachePpKey, RocksStore};
use rpki::validation::manifest::process_manifest_publication_point;
/// Loads the DER bytes of the APNIC issuer CA certificate fixture from disk.
fn issuer_ca_fixture() -> Vec<u8> {
    let path = "tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer";
    std::fs::read(path).expect("read issuer ca fixture")
}
/// rsync URI at which the issuer CA certificate fixture is published.
fn issuer_ca_rsync_uri() -> &'static str {
    const URI: &str = "rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer";
    URI
}
// A manifest whose own location is not under the publication point URI must be
// treated as a failed fetch (RFC 9286 §6.1), and a failed fetch must never
// write a fetch_cache_pp entry.
#[test]
fn manifest_outside_publication_point_is_failed_fetch_rfc9286_section6_1() {
    let fixture_manifest_path = Path::new(
        "tests/fixtures/repository/rpki.cernet.net/repo/cernet/0/05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft",
    );
    let fixture_dir = fixture_manifest_path.parent().expect("fixture dir");
    let manifest_bytes = std::fs::read(fixture_manifest_path).expect("read manifest fixture");
    let manifest = ManifestObject::decode_der(&manifest_bytes).expect("decode manifest fixture");
    // Validation time just inside the manifest's validity window.
    let validation_time = manifest.manifest.this_update + time::Duration::seconds(1);
    // Intentionally mismatch: manifest is NOT under the publication point URI.
    let manifest_rsync_uri = "rsync://example.test/a/manifest.mft";
    let publication_point_rsync_uri = "rsync://example.test/b/";
    let temp = tempfile::tempdir().expect("tempdir");
    let store = RocksStore::open(temp.path()).expect("open rocksdb");
    // Store the manifest at its rsync URI.
    store
        .put_raw(manifest_rsync_uri, &manifest_bytes)
        .expect("store manifest raw");
    // Store all referenced files under the (different) publication point so that §6.4/§6.5
    // would otherwise succeed if §6.1 was not enforced.
    for entry in &manifest.manifest.files {
        let file_path = fixture_dir.join(&entry.file_name);
        let bytes = std::fs::read(&file_path)
            .unwrap_or_else(|_| panic!("read fixture file referenced by manifest: {file_path:?}"));
        let rsync_uri = format!("{publication_point_rsync_uri}{}", entry.file_name);
        store.put_raw(&rsync_uri, &bytes).expect("store file raw");
    }
    let mut policy = Policy::default();
    // StopAllOutput so the cache fallback cannot mask the §6.1 failure.
    policy.ca_failed_fetch_policy = CaFailedFetchPolicy::StopAllOutput;
    let issuer_ca_der = issuer_ca_fixture();
    let err = process_manifest_publication_point(
        &store,
        &policy,
        manifest_rsync_uri,
        publication_point_rsync_uri,
        &issuer_ca_der,
        Some(issuer_ca_rsync_uri()),
        validation_time,
    )
    .expect_err("§6.1 mismatch must be treated as failed fetch");
    let msg = err.to_string();
    assert!(msg.contains("RFC 9286 §6.1"), "{msg}");
    // Failed fetches must leave no cache entry behind.
    let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_rsync_uri);
    assert!(
        store
            .get_fetch_cache_pp(&key)
            .expect("get fetch_cache_pp")
            .is_none(),
        "must not write fetch_cache_pp on failed fetch"
    );
}

View File

@ -6,7 +6,7 @@ use rpki::data_model::rc::ResourceCertificate;
use rpki::policy::{Policy, SignedObjectFailurePolicy};
use rpki::storage::{PackFile, RocksStore};
use rpki::validation::manifest::process_manifest_publication_point;
use rpki::validation::objects::process_verified_publication_point_pack_for_issuer;
use rpki::validation::objects::process_fetch_cache_pp_pack_for_issuer;
fn fixture_to_rsync_uri(path: &Path) -> String {
let rel = path
@ -31,7 +31,7 @@ fn fixture_dir_to_rsync_uri(dir: &Path) -> String {
}
fn build_cernet_pack_and_validation_time() -> (
rpki::storage::VerifiedPublicationPointPack,
rpki::storage::FetchCachePpPack,
time::OffsetDateTime,
Vec<u8>,
ResourceCertificate,
@ -59,20 +59,22 @@ fn build_cernet_pack_and_validation_time() -> (
store.put_raw(&rsync_uri, &bytes).expect("store file");
}
let issuer_ca_der = std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer CA cert fixture");
let policy = Policy::default();
let out = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some("rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer"),
manifest.manifest.this_update + time::Duration::seconds(1),
)
.expect("process manifest publication point");
let issuer_ca_der = std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer CA cert fixture");
let issuer_ca = ResourceCertificate::decode_der(&issuer_ca_der).expect("decode issuer CA cert");
let crl_file = out
@ -104,7 +106,7 @@ fn missing_crl_causes_roas_to_be_dropped_under_drop_object_policy() {
let mut policy = Policy::default();
policy.signed_object_failure_policy = SignedObjectFailurePolicy::DropObject;
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,
@ -129,7 +131,7 @@ fn wrong_issuer_ca_cert_causes_roas_to_be_dropped_under_drop_object_policy() {
// Use an unrelated trust anchor certificate as the issuer to force EE cert path validation to fail.
let wrong_issuer_ca_der =
std::fs::read("tests/fixtures/ta/arin-ta.cer").expect("read wrong issuer ca");
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&wrong_issuer_ca_der,
@ -156,7 +158,7 @@ fn invalid_aspa_object_is_reported_as_warning_under_drop_object_policy() {
let mut policy = Policy::default();
policy.signed_object_failure_policy = SignedObjectFailurePolicy::DropObject;
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,

View File

@ -6,7 +6,7 @@ use rpki::data_model::rc::ResourceCertificate;
use rpki::policy::{Policy, SignedObjectFailurePolicy};
use rpki::storage::{PackFile, RocksStore};
use rpki::validation::manifest::process_manifest_publication_point;
use rpki::validation::objects::process_verified_publication_point_pack_for_issuer;
use rpki::validation::objects::process_fetch_cache_pp_pack_for_issuer;
fn fixture_to_rsync_uri(path: &Path) -> String {
let rel = path
@ -31,7 +31,7 @@ fn fixture_dir_to_rsync_uri(dir: &Path) -> String {
}
fn build_cernet_pack_and_validation_time() -> (
rpki::storage::VerifiedPublicationPointPack,
rpki::storage::FetchCachePpPack,
time::OffsetDateTime,
Vec<u8>,
ResourceCertificate,
@ -59,20 +59,22 @@ fn build_cernet_pack_and_validation_time() -> (
store.put_raw(&rsync_uri, &bytes).expect("store file");
}
let issuer_ca_der = std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer CA cert fixture");
let policy = Policy::default();
let out = process_manifest_publication_point(
&store,
&policy,
&manifest_rsync_uri,
&publication_point_rsync_uri,
&issuer_ca_der,
Some("rsync://rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer"),
manifest.manifest.this_update + time::Duration::seconds(1),
)
.expect("process manifest publication point");
let issuer_ca_der = std::fs::read(
"tests/fixtures/repository/rpki.apnic.net/repository/B527EF581D6611E2BB468F7C72FD1FF2/BfycW4hQb3wNP4YsiJW-1n6fjro.cer",
)
.expect("read issuer CA cert fixture");
let issuer_ca = ResourceCertificate::decode_der(&issuer_ca_der).expect("decode issuer CA cert");
let crl_file = out
@ -127,7 +129,7 @@ fn drop_object_policy_drops_only_failing_object() {
let mut policy = Policy::default();
policy.signed_object_failure_policy = SignedObjectFailurePolicy::DropObject;
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,
@ -169,7 +171,7 @@ fn drop_publication_point_policy_drops_the_publication_point() {
let mut policy = Policy::default();
policy.signed_object_failure_policy = SignedObjectFailurePolicy::DropPublicationPoint;
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,

View File

@ -1,10 +1,10 @@
use rpki::fetch::rsync::LocalDirRsyncFetcher;
use rpki::policy::{Policy, SignedObjectFailurePolicy, SyncPreference};
use rpki::storage::{PackFile, PackTime, RocksStore, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackFile, PackTime, RocksStore};
use rpki::sync::repo::sync_publication_point;
use rpki::sync::rrdp::Fetcher;
use rpki::validation::manifest::process_manifest_publication_point;
use rpki::validation::objects::process_verified_publication_point_pack_for_issuer;
use rpki::validation::objects::process_fetch_cache_pp_pack_for_issuer;
struct NoopHttpFetcher;
impl Fetcher for NoopHttpFetcher {
@ -56,12 +56,13 @@ fn minimal_pack(
manifest_bytes: Vec<u8>,
files: Vec<PackFile>,
validation_time: time::OffsetDateTime,
) -> VerifiedPublicationPointPack {
) -> FetchCachePpPack {
// Keep times consistent enough to pass internal pack validation.
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: manifest_rsync_uri.to_string(),
publication_point_rsync_uri: publication_point_rsync_uri.to_string(),
manifest_number_be: vec![1],
this_update: PackTime::from_utc_offset_datetime(validation_time),
next_update: PackTime::from_utc_offset_datetime(validation_time + time::Duration::hours(1)),
verified_at: PackTime::from_utc_offset_datetime(validation_time),
@ -70,12 +71,12 @@ fn minimal_pack(
}
}
fn build_verified_pack_from_local_rsync_fixture(
fn build_fetch_cache_pp_from_local_rsync_fixture(
dir: &std::path::Path,
rsync_base_uri: &str,
manifest_rsync_uri: &str,
validation_time: time::OffsetDateTime,
) -> rpki::storage::VerifiedPublicationPointPack {
) -> rpki::storage::FetchCachePpPack {
let store_dir = tempfile::tempdir().expect("store dir");
let store = RocksStore::open(store_dir.path()).expect("open rocksdb");
let policy = Policy {
@ -98,6 +99,8 @@ fn build_verified_pack_from_local_rsync_fixture(
&policy,
manifest_rsync_uri,
rsync_base_uri,
issuer_ca_fixture().as_slice(),
Some(issuer_ca_rsync_uri()),
validation_time,
)
.expect("process manifest");
@ -111,7 +114,7 @@ fn process_pack_for_issuer_extracts_vrps_from_real_cernet_fixture() {
let manifest_rsync_uri = format!("{rsync_base_uri}{manifest_file}");
let validation_time = validation_time_from_manifest_fixture(&dir, &manifest_file);
let pack = build_verified_pack_from_local_rsync_fixture(
let pack = build_fetch_cache_pp_from_local_rsync_fixture(
&dir,
&rsync_base_uri,
&manifest_rsync_uri,
@ -123,7 +126,7 @@ fn process_pack_for_issuer_extracts_vrps_from_real_cernet_fixture() {
.expect("decode issuer ca");
let policy = Policy::default();
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,
@ -147,7 +150,7 @@ fn signed_object_failure_policy_drop_object_drops_only_bad_object() {
let manifest_rsync_uri = format!("{rsync_base_uri}{manifest_file}");
let validation_time = validation_time_from_manifest_fixture(&dir, &manifest_file);
let mut pack = build_verified_pack_from_local_rsync_fixture(
let mut pack = build_fetch_cache_pp_from_local_rsync_fixture(
&dir,
&rsync_base_uri,
&manifest_rsync_uri,
@ -177,7 +180,7 @@ fn signed_object_failure_policy_drop_object_drops_only_bad_object() {
signed_object_failure_policy: SignedObjectFailurePolicy::DropObject,
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,
@ -209,7 +212,7 @@ fn signed_object_failure_policy_drop_publication_point_drops_all_output() {
let manifest_rsync_uri = format!("{rsync_base_uri}{manifest_file}");
let validation_time = validation_time_from_manifest_fixture(&dir, &manifest_file);
let mut pack = build_verified_pack_from_local_rsync_fixture(
let mut pack = build_fetch_cache_pp_from_local_rsync_fixture(
&dir,
&rsync_base_uri,
&manifest_rsync_uri,
@ -239,7 +242,7 @@ fn signed_object_failure_policy_drop_publication_point_drops_all_output() {
signed_object_failure_policy: SignedObjectFailurePolicy::DropPublicationPoint,
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&issuer_ca_der,
@ -293,7 +296,7 @@ fn process_pack_for_issuer_without_crl_drops_publication_point() {
);
let policy = Policy::default();
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[],
@ -335,7 +338,7 @@ fn process_pack_for_issuer_handles_invalid_aspa_bytes() {
signed_object_failure_policy: SignedObjectFailurePolicy::DropObject,
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[],
@ -377,7 +380,7 @@ fn process_pack_for_issuer_drop_publication_point_on_invalid_aspa_bytes() {
signed_object_failure_policy: SignedObjectFailurePolicy::DropPublicationPoint,
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[],

View File

@ -1,18 +1,19 @@
use rpki::policy::{Policy, SignedObjectFailurePolicy};
use rpki::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use rpki::validation::objects::process_verified_publication_point_pack_for_issuer;
use rpki::storage::{FetchCachePpPack, PackFile, PackTime};
use rpki::validation::objects::process_fetch_cache_pp_pack_for_issuer;
fn fixture_bytes(path: &str) -> Vec<u8> {
std::fs::read(std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(path))
.unwrap_or_else(|e| panic!("read fixture {path}: {e}"))
}
fn dummy_pack(manifest_bytes: Vec<u8>, files: Vec<PackFile>) -> VerifiedPublicationPointPack {
fn dummy_pack(manifest_bytes: Vec<u8>, files: Vec<PackFile>) -> FetchCachePpPack {
let now = time::OffsetDateTime::now_utc();
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: "rsync://example.test/repo/pp/manifest.mft".to_string(),
publication_point_rsync_uri: "rsync://example.test/repo/pp/".to_string(),
manifest_number_be: vec![1],
this_update: PackTime::from_utc_offset_datetime(now),
next_update: PackTime::from_utc_offset_datetime(now + time::Duration::hours(1)),
verified_at: PackTime::from_utc_offset_datetime(now),
@ -59,7 +60,7 @@ fn process_pack_drop_object_on_wrong_issuer_ca_for_roa() {
};
let wrong_issuer_ca_der = fixture_bytes("tests/fixtures/ta/arin-ta.cer");
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&wrong_issuer_ca_der,
@ -121,7 +122,7 @@ fn process_pack_drop_publication_point_on_wrong_issuer_ca_for_roa_skips_rest() {
};
let wrong_issuer_ca_der = fixture_bytes("tests/fixtures/ta/arin-ta.cer");
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&wrong_issuer_ca_der,
@ -170,7 +171,7 @@ fn process_pack_drop_object_on_wrong_issuer_ca_for_aspa() {
};
let wrong_issuer_ca_der = fixture_bytes("tests/fixtures/ta/arin-ta.cer");
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&wrong_issuer_ca_der,
@ -225,7 +226,7 @@ fn process_pack_drop_publication_point_on_wrong_issuer_ca_for_aspa_skips_rest()
};
let wrong_issuer_ca_der = fixture_bytes("tests/fixtures/ta/arin-ta.cer");
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&wrong_issuer_ca_der,
@ -261,7 +262,7 @@ fn process_pack_for_issuer_marks_objects_skipped_when_missing_issuer_crl() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],
@ -303,7 +304,7 @@ fn process_pack_for_issuer_drop_object_records_errors_and_continues() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],
@ -344,7 +345,7 @@ fn process_pack_for_issuer_drop_publication_point_records_skips_for_rest() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],
@ -394,7 +395,7 @@ fn process_pack_for_issuer_selects_crl_by_ee_crldp_uri_roa() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],
@ -435,7 +436,7 @@ fn process_pack_for_issuer_rejects_roa_when_crldp_crl_missing() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],
@ -490,7 +491,7 @@ fn process_pack_for_issuer_selects_crl_by_ee_crldp_uri_aspa() {
..Policy::default()
};
let out = process_verified_publication_point_pack_for_issuer(
let out = process_fetch_cache_pp_pack_for_issuer(
&pack,
&policy,
&[0x01, 0x02, 0x03],

View File

@ -6,7 +6,7 @@ fn policy_defaults_are_correct() {
assert_eq!(p.sync_preference, SyncPreference::RrdpThenRsync);
assert_eq!(
p.ca_failed_fetch_policy,
CaFailedFetchPolicy::UseVerifiedCache
CaFailedFetchPolicy::UseFetchCachePp
);
assert_eq!(
p.signed_object_failure_policy,

View File

@ -7,7 +7,7 @@ use rpki::fetch::rsync::LocalDirRsyncFetcher;
use rpki::policy::{Policy, SyncPreference};
use rpki::storage::RocksStore;
use rpki::sync::rrdp::Fetcher;
use rpki::validation::run::{run_publication_point_once, verified_pack_exists};
use rpki::validation::run::{fetch_cache_pp_exists, run_publication_point_once};
fn fixture_to_rsync_uri(path: &Path) -> String {
let rel = path
@ -40,7 +40,7 @@ impl Fetcher for NeverHttpFetcher {
}
#[test]
fn e2e_offline_uses_rsync_then_writes_verified_pack_then_outputs_vrps() {
fn e2e_offline_uses_rsync_then_writes_fetch_cache_pp_then_outputs_vrps() {
let fixture_dir = Path::new("tests/fixtures/repository/rpki.cernet.net/repo/cernet/0");
let rsync_base_uri = "rsync://rpki.cernet.net/repo/cernet/0/";
let manifest_path = fixture_dir.join("05FC9C5B88506F7C0D3F862C8895BED67E9F8EBA.mft");
@ -105,7 +105,7 @@ fn e2e_offline_uses_rsync_then_writes_verified_pack_then_outputs_vrps() {
)
.expect("run publication point once");
assert!(verified_pack_exists(&store, &manifest_rsync_uri).expect("exists check"));
assert!(fetch_cache_pp_exists(&store, &manifest_rsync_uri).expect("exists check"));
assert_eq!(out.repo_sync.objects_written, expected_files);
assert!(

View File

@ -1,7 +1,7 @@
use rpki::storage::{RocksStore, VerifiedKey};
use rpki::storage::{FetchCachePpKey, RocksStore};
#[test]
fn storage_iter_all_lists_raw_and_verified_entries() {
fn storage_iter_all_lists_raw_and_fetch_cache_pp_entries() {
let temp = tempfile::tempdir().expect("tempdir");
let store = RocksStore::open(temp.path()).expect("open rocksdb");
@ -12,8 +12,10 @@ fn storage_iter_all_lists_raw_and_verified_entries() {
.put_raw("rsync://example.test/repo/b.roa", b"b")
.expect("put_raw b");
let key = VerifiedKey::from_manifest_rsync_uri("rsync://example.test/repo/m.mft");
store.put_verified(&key, b"x").expect("put_verified");
let key = FetchCachePpKey::from_manifest_rsync_uri("rsync://example.test/repo/m.mft");
store
.put_fetch_cache_pp(&key, b"x")
.expect("put_fetch_cache_pp");
let raw_keys = store
.raw_iter_all()
@ -24,10 +26,10 @@ fn storage_iter_all_lists_raw_and_verified_entries() {
assert!(raw_keys.contains(&"rsync://example.test/repo/a.cer".to_string()));
assert!(raw_keys.contains(&"rsync://example.test/repo/b.roa".to_string()));
let verified_keys = store
.verified_iter_all()
.expect("verified_iter_all")
let keys = store
.fetch_cache_pp_iter_all()
.expect("fetch_cache_pp_iter_all")
.map(|(k, _v)| String::from_utf8(k.to_vec()).expect("utf8 key"))
.collect::<Vec<_>>();
assert_eq!(verified_keys, vec![key.as_str().to_string()]);
assert_eq!(keys, vec![key.as_str().to_string()]);
}

View File

@ -1,6 +1,6 @@
use rocksdb::WriteBatch;
use rpki::storage::{RocksStore, VerifiedKey};
use rpki::storage::{FetchCachePpKey, RocksStore};
#[test]
fn storage_delete_rrdp_state_works() {
@ -58,9 +58,12 @@ fn storage_raw_iter_prefix_filters_by_prefix() {
}
#[test]
fn storage_verified_key_format_is_stable() {
let k = VerifiedKey::from_manifest_rsync_uri("rsync://example.net/repo/manifest.mft");
assert_eq!(k.as_str(), "verified:rsync://example.net/repo/manifest.mft");
fn storage_fetch_cache_pp_key_format_is_stable() {
let k = FetchCachePpKey::from_manifest_rsync_uri("rsync://example.net/repo/manifest.mft");
assert_eq!(
k.as_str(),
"fetch_cache_pp:rsync://example.net/repo/manifest.mft"
);
}
#[test]

View File

@ -1,6 +1,6 @@
use std::path::Path;
use rpki::storage::{RocksStore, VerifiedKey};
use rpki::storage::{FetchCachePpKey, RocksStore};
#[test]
fn storage_opens_and_creates_column_families() {
@ -25,22 +25,22 @@ fn raw_objects_roundtrip_by_rsync_uri() {
}
#[test]
fn verified_pack_roundtrip_by_manifest_uri() {
fn fetch_cache_pp_roundtrip_by_manifest_uri() {
let dir = tempfile::tempdir().expect("tempdir");
let store = RocksStore::open(dir.path()).expect("open rocksdb");
let manifest_uri = "rsync://example.invalid/repo/manifest.mft";
let verified_key = VerifiedKey::from_manifest_rsync_uri(manifest_uri);
let key = FetchCachePpKey::from_manifest_rsync_uri(manifest_uri);
assert_eq!(
verified_key.as_str(),
"verified:rsync://example.invalid/repo/manifest.mft"
key.as_str(),
"fetch_cache_pp:rsync://example.invalid/repo/manifest.mft"
);
let bytes = b"pack";
store
.put_verified(&verified_key, bytes)
.expect("put verified");
let got = store.get_verified(&verified_key).expect("get verified");
.put_fetch_cache_pp(&key, bytes)
.expect("put fetch_cache_pp");
let got = store.get_fetch_cache_pp(&key).expect("get fetch_cache_pp");
assert_eq!(got.as_deref(), Some(bytes.as_slice()));
}

View File

@ -2,7 +2,7 @@ use std::collections::HashMap;
use rpki::audit::{DiscoveredFrom, PublicationPointAudit};
use rpki::report::Warning;
use rpki::storage::{PackTime, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackTime};
use rpki::validation::manifest::PublicationPointSource;
use rpki::validation::objects::{ObjectsOutput, ObjectsStats};
use rpki::validation::tree::{
@ -10,11 +10,12 @@ use rpki::validation::tree::{
TreeRunConfig, run_tree_serial,
};
fn empty_pack(manifest_uri: &str, pp_uri: &str) -> VerifiedPublicationPointPack {
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
fn empty_pack(manifest_uri: &str, pp_uri: &str) -> FetchCachePpPack {
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
publication_point_rsync_uri: pp_uri.to_string(),
manifest_rsync_uri: manifest_uri.to_string(),
manifest_number_be: vec![1],
this_update: PackTime {
rfc3339_utc: "2026-01-01T00:00:00Z".to_string(),
},

View File

@ -2,7 +2,7 @@ use std::collections::HashMap;
use rpki::audit::{DiscoveredFrom, PublicationPointAudit};
use rpki::report::Warning;
use rpki::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackFile, PackTime};
use rpki::validation::manifest::PublicationPointSource;
use rpki::validation::objects::{ObjectsOutput, ObjectsStats};
use rpki::validation::tree::{
@ -43,11 +43,12 @@ impl PublicationPointRunner for MockRunner {
}
}
fn empty_pack(manifest_uri: &str, pp_uri: &str) -> VerifiedPublicationPointPack {
VerifiedPublicationPointPack {
fn empty_pack(manifest_uri: &str, pp_uri: &str) -> FetchCachePpPack {
FetchCachePpPack {
format_version: 1,
publication_point_rsync_uri: pp_uri.to_string(),
manifest_rsync_uri: manifest_uri.to_string(),
manifest_number_be: vec![1],
this_update: PackTime {
rfc3339_utc: "2026-01-01T00:00:00Z".to_string(),
},
@ -129,7 +130,7 @@ fn tree_enqueues_children_only_for_fresh_publication_points() {
.with(
child1_manifest,
PublicationPointRunResult {
source: PublicationPointSource::VerifiedCache,
source: PublicationPointSource::FetchCachePp,
pack: empty_pack(child1_manifest, "rsync://example.test/repo/child1/"),
warnings: vec![Warning::new("child1 warning")],
objects: ObjectsOutput {

View File

@ -1,6 +1,6 @@
use rpki::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackFile, PackTime};
fn sample_pack() -> VerifiedPublicationPointPack {
fn sample_pack() -> FetchCachePpPack {
let this_update =
PackTime::from_utc_offset_datetime(time::OffsetDateTime::from_unix_timestamp(0).unwrap());
let next_update = PackTime::from_utc_offset_datetime(
@ -18,10 +18,11 @@ fn sample_pack() -> VerifiedPublicationPointPack {
b"cer-bytes".to_vec(),
);
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: "rsync://example.net/repo/CA/manifest.mft".to_string(),
publication_point_rsync_uri: "rsync://example.net/repo/CA/".to_string(),
manifest_number_be: vec![1],
this_update,
next_update,
verified_at,
@ -34,7 +35,7 @@ fn sample_pack() -> VerifiedPublicationPointPack {
fn pack_encode_decode_roundtrip() {
let pack = sample_pack();
let bytes = pack.encode().expect("encode");
let decoded = VerifiedPublicationPointPack::decode(&bytes).expect("decode");
let decoded = FetchCachePpPack::decode(&bytes).expect("decode");
assert_eq!(decoded, pack);
}
@ -43,7 +44,7 @@ fn pack_rejects_missing_manifest() {
let mut pack = sample_pack();
pack.manifest_bytes.clear();
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
@ -53,14 +54,14 @@ fn pack_rejects_duplicate_rsync_uri_entries() {
PackFile::from_bytes_compute_sha256("rsync://example.net/repo/CA/1.crl", b"other".to_vec());
pack.files.push(dup);
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
fn pack_includes_this_update_next_update() {
let pack = sample_pack();
let bytes = pack.encode().expect("encode");
let decoded = VerifiedPublicationPointPack::decode(&bytes).expect("decode");
let decoded = FetchCachePpPack::decode(&bytes).expect("decode");
let this_update = decoded.this_update.parse().expect("parse this_update");
let next_update = decoded.next_update.parse().expect("parse next_update");

View File

@ -1,6 +1,6 @@
use rpki::storage::{PackFile, PackTime, VerifiedPublicationPointPack};
use rpki::storage::{FetchCachePpPack, PackFile, PackTime};
fn base_pack() -> VerifiedPublicationPointPack {
fn base_pack() -> FetchCachePpPack {
let this_update =
PackTime::from_utc_offset_datetime(time::OffsetDateTime::from_unix_timestamp(0).unwrap());
let next_update = PackTime::from_utc_offset_datetime(
@ -12,10 +12,11 @@ fn base_pack() -> VerifiedPublicationPointPack {
let file =
PackFile::from_bytes_compute_sha256("rsync://example.net/repo/obj.cer", b"x".to_vec());
VerifiedPublicationPointPack {
format_version: VerifiedPublicationPointPack::FORMAT_VERSION_V1,
FetchCachePpPack {
format_version: FetchCachePpPack::FORMAT_VERSION_V1,
manifest_rsync_uri: "rsync://example.net/repo/manifest.mft".to_string(),
publication_point_rsync_uri: "rsync://example.net/repo/".to_string(),
manifest_number_be: vec![1],
this_update,
next_update,
verified_at,
@ -30,7 +31,7 @@ fn pack_rejects_unsupported_format_version() {
pack.format_version = 999;
let bytes = pack.encode().expect("encode");
assert!(
VerifiedPublicationPointPack::decode(&bytes)
FetchCachePpPack::decode(&bytes)
.unwrap_err()
.to_string()
.contains("unsupported pack format_version")
@ -42,7 +43,7 @@ fn pack_rejects_missing_manifest_rsync_uri() {
let mut pack = base_pack();
pack.manifest_rsync_uri.clear();
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
@ -50,7 +51,34 @@ fn pack_rejects_missing_publication_point_rsync_uri() {
let mut pack = base_pack();
pack.publication_point_rsync_uri.clear();
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
fn pack_rejects_missing_manifest_number() {
let mut pack = base_pack();
pack.manifest_number_be.clear();
let bytes = pack.encode().expect("encode");
let err = FetchCachePpPack::decode(&bytes).unwrap_err();
assert!(err.to_string().contains("missing required field"));
}
#[test]
fn pack_rejects_manifest_number_too_long() {
let mut pack = base_pack();
pack.manifest_number_be = vec![1u8; 21];
let bytes = pack.encode().expect("encode");
let err = FetchCachePpPack::decode(&bytes).unwrap_err();
assert!(err.to_string().contains("at most 20 octets"));
}
#[test]
fn pack_rejects_manifest_number_with_leading_zeros() {
let mut pack = base_pack();
pack.manifest_number_be = vec![0u8, 1u8];
let bytes = pack.encode().expect("encode");
let err = FetchCachePpPack::decode(&bytes).unwrap_err();
assert!(err.to_string().contains("leading zeros"));
}
#[test]
@ -60,7 +88,7 @@ fn pack_rejects_invalid_time_fields() {
rfc3339_utc: "not-a-time".to_string(),
};
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
@ -74,7 +102,7 @@ fn pack_rejects_empty_file_bytes() {
sha,
)];
let bytes = pack.encode().expect("encode");
assert!(VerifiedPublicationPointPack::decode(&bytes).is_err());
assert!(FetchCachePpPack::decode(&bytes).is_err());
}
#[test]
@ -86,7 +114,7 @@ fn pack_rejects_file_hash_mismatch() {
[0u8; 32],
)];
let bytes = pack.encode().expect("encode");
let err = VerifiedPublicationPointPack::decode(&bytes).unwrap_err();
let err = FetchCachePpPack::decode(&bytes).unwrap_err();
assert!(err.to_string().contains("file hash mismatch"));
}
@ -96,6 +124,6 @@ fn pack_rejects_missing_file_rsync_uri() {
let file = PackFile::from_bytes_compute_sha256("", b"x".to_vec());
pack.files = vec![file];
let bytes = pack.encode().expect("encode");
let err = VerifiedPublicationPointPack::decode(&bytes).unwrap_err();
let err = FetchCachePpPack::decode(&bytes).unwrap_err();
assert!(err.to_string().contains("missing required field"));
}