2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

25 changes: 25 additions & 0 deletions common/src/types.rs
@@ -735,6 +735,31 @@ pub enum Point {
    },
}

impl Point {
    pub fn slot(&self) -> Slot {
        match self {
            Self::Origin => 0,
            Self::Specific { slot, .. } => *slot,
        }
    }

    pub fn hash(&self) -> Option<&BlockHash> {
        match self {
            Self::Origin => None,
            Self::Specific { hash, .. } => Some(hash),
        }
    }
}

impl Display for Point {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Origin => write!(f, "origin"),
            Self::Specific { hash, slot } => write!(f, "{}@{}", hash, slot),
        }
    }
}

/// Amount of Ada, in Lovelace
pub type Lovelace = u64;
pub type LovelaceDelta = i64;
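For orientation, a minimal usage sketch of the new Point helpers and Display impl (not part of this diff; the describe function is hypothetical):

use acropolis_common::Point;

// Hypothetical helper: formats any point using the accessors added above.
fn describe(point: &Point) -> String {
    match point.hash() {
        // Specific points render as "{hash}@{slot}" via the new Display impl
        Some(_) => format!("{} (block at slot {})", point, point.slot()),
        // Origin has no hash; slot() reports 0 for it
        None => point.to_string(), // prints "origin"
    }
}
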
2 changes: 2 additions & 0 deletions modules/snapshot_bootstrapper/Cargo.toml
@@ -14,7 +14,9 @@ acropolis_common = { path = "../../common" }
caryatid_sdk = { workspace = true }

anyhow = { workspace = true }
pallas-primitives = { workspace = true }
config = { workspace = true }
minicbor = { version = "0.25.1", features = ["std", "half", "derive"] }
tokio = { workspace = true }
tracing = { workspace = true }
serde = { workspace = true, features = ["rc"] }
4 changes: 4 additions & 0 deletions modules/snapshot_bootstrapper/data/mainnet/headers.json
@@ -0,0 +1,4 @@
[
"134524816.82dbc35b32bcbbad4e14cda9b1bfb1ceeee4d2fb8d2f299caf2008cfe072bd54",
"134956789.6558deef007ba372a414466e49214368c17c1f8428093193fc187d1c4587053c"
]
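
Each entry encodes a chain point as "{slot}.{hash}"; the same encoding is used for the "at" field of nonces.json below and is parsed by deserialize_point in nonces.rs. The two unrendered binary files that follow are presumably the corresponding header.{slot}.{hash}.cbor blobs.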
Binary file not shown.
Binary file not shown.
7 changes: 7 additions & 0 deletions modules/snapshot_bootstrapper/data/mainnet/nonces.json
@@ -0,0 +1,7 @@
{
"at": "134956789.6558deef007ba372a414466e49214368c17c1f8428093193fc187d1c4587053c",
"active": "0b9e320e63bf995b81287ce7a624b6735d98b083cc1a0e2ae8b08b680c79c983",
"candidate": "6cc4dafecbe0d593ca0dee64518542f5faa741538791ac7fc2d5008f32d5c4d5",
"evolving": "f5589f01dd0efd0add0c58e8b27dc73ba3fcd662d9026b3fedbf06c648adb313",
"tail": "29011cc1320d03b3da0121236dc66e6bc391feef4bb1d506a7fb20e769d6a494"
}
2 changes: 2 additions & 0 deletions modules/snapshot_bootstrapper/src/bootstrapper.rs
@@ -1,5 +1,7 @@
mod configuration;
mod downloader;
mod header;
mod nonces;
mod progress_reader;
mod publisher;

173 changes: 173 additions & 0 deletions modules/snapshot_bootstrapper/src/header.rs
@@ -0,0 +1,173 @@
#![allow(dead_code, unused)]
use acropolis_common::hash::Hash;
use acropolis_common::Point;
use pallas_primitives::babbage::MintedHeader;
use pallas_primitives::conway::Header as ConwayHeader;
use std::fs;
use std::path::{Path, PathBuf};
use thiserror::Error;

#[derive(Debug, Error)]
pub enum HeaderContextError {
    #[error("Failed to read header file {0}: {1}")]
    ReadFile(PathBuf, std::io::Error),

    #[error("Failed to decode header at slot {0}: {1}")]
    Decode(u64, String),

    #[error("Origin point has no hash")]
    OriginPoint,

    #[error("Failed to convert hash: {0}")]
    HashConversion(String),
}

#[derive(Debug)]
pub struct HeaderContext {
    pub point: Point,
    pub block_number: u64,
}

impl HeaderContext {
    /// Generate the path for a header file.
    /// Returns an error if the point is Origin (has no hash).
    pub fn path(network_dir: &Path, point: &Point) -> Result<PathBuf, HeaderContextError> {
        let hash = point.hash().ok_or(HeaderContextError::OriginPoint)?;
        let filename = format!("header.{}.{}.cbor", point.slot(), hash);
        Ok(network_dir.join("headers").join(filename))
    }

    /// Convert raw hash bytes to our Hash type.
    pub fn convert_hash(block_body_hash: &[u8]) -> Result<Hash<32>, HeaderContextError> {
        block_body_hash
            .try_into()
            .map_err(|_| HeaderContextError::HashConversion(format!("{:02x?}", block_body_hash)))
    }

    /// Load and decode header from `headers/header.{slot}.{hash}.cbor`
    pub fn load(network_dir: &Path, point: &Point) -> Result<Self, HeaderContextError> {
        let path = Self::path(network_dir, point)?;
        let cbor = fs::read(&path).map_err(|e| HeaderContextError::ReadFile(path, e))?;

        let minted: MintedHeader<'_> = minicbor::decode(&cbor)
            .map_err(|e| HeaderContextError::Decode(point.slot(), e.to_string()))?;
        let header = ConwayHeader::from(minted);
        Ok(Self {
            point: point.clone(),
            block_number: header.header_body.block_number,
        })
    }
}

#[cfg(test)]
mod header_tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    fn specific_point(slot: u64, hash_str: &str) -> Point {
        Point::Specific {
            slot,
            hash: hash_str.parse().expect("valid hash"),
        }
    }

    fn setup_headers_dir() -> TempDir {
        let temp_dir = TempDir::new().unwrap();
        fs::create_dir_all(temp_dir.path().join("headers")).unwrap();
        temp_dir
    }

    const ZERO_HASH: &str = "0000000000000000000000000000000000000000000000000000000000000000";

    #[test]
    fn path_fails_for_origin_point() {
        let result = HeaderContext::path(Path::new("/test"), &Point::Origin);

        let err = result.unwrap_err();
        assert!(matches!(err, HeaderContextError::OriginPoint));
        assert_eq!(err.to_string(), "Origin point has no hash");
    }

    #[test]
    fn path_succeeds_for_specific_point() {
        let point = specific_point(42, ZERO_HASH);

        let path = HeaderContext::path(Path::new("/test"), &point).unwrap();

        assert!(path.ends_with(format!("headers/header.42.{}.cbor", ZERO_HASH)));
    }

    #[test]
    fn convert_hash_fails_for_wrong_length() {
        // Too short
        assert!(matches!(
            HeaderContext::convert_hash(&[0u8; 16]),
            Err(HeaderContextError::HashConversion(_))
        ));

        // Too long
        assert!(matches!(
            HeaderContext::convert_hash(&[0u8; 64]),
            Err(HeaderContextError::HashConversion(_))
        ));
    }

    #[test]
    fn convert_hash_succeeds_for_32_bytes() {
        let bytes = [0xab; 32];
        assert!(HeaderContext::convert_hash(&bytes).is_ok());
    }

    #[test]
    fn hash_conversion_error_includes_hex_representation() {
        let err = HeaderContext::convert_hash(&[0xde, 0xad, 0xbe, 0xef]).unwrap_err();
        let msg = err.to_string().to_lowercase();

        assert!(msg.contains("de") && msg.contains("ad"));
    }

    #[test]
    fn load_fails_for_origin_point() {
        let temp_dir = setup_headers_dir();

        let err = HeaderContext::load(temp_dir.path(), &Point::Origin).unwrap_err();

        assert!(matches!(err, HeaderContextError::OriginPoint));
    }

    #[test]
    fn load_fails_when_file_missing() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(12345, ZERO_HASH);

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::ReadFile(_, _)));
        assert!(err.to_string().contains("header.12345"));
    }

    #[test]
    fn load_fails_for_invalid_cbor() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(12345, ZERO_HASH);
        let path = HeaderContext::path(temp_dir.path(), &point).unwrap();
        fs::write(&path, b"not valid cbor").unwrap();

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::Decode(12345, _)));
    }

    #[test]
    fn load_fails_for_wrong_cbor_structure() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(555, ZERO_HASH);
        let path = HeaderContext::path(temp_dir.path(), &point).unwrap();
        fs::write(&path, minicbor::to_vec(42u64).unwrap()).unwrap();

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::Decode(555, _)));
    }
}
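
A sketch of how the loader might be called during bootstrap (the wrapper function is hypothetical; anyhow is already a dependency of this module):

use std::path::Path;
use acropolis_common::Point;

// Hypothetical wrapper: resolves headers/header.{slot}.{hash}.cbor under
// network_dir, decodes the CBOR header, and returns its block number.
fn block_number_at(network_dir: &Path, point: &Point) -> anyhow::Result<u64> {
    let ctx = HeaderContext::load(network_dir, point)?;
    Ok(ctx.block_number)
}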
147 changes: 147 additions & 0 deletions modules/snapshot_bootstrapper/src/nonces.rs
@@ -0,0 +1,147 @@
#![allow(dead_code, unused)]
use acropolis_common::protocol_params::{Nonce, Nonces};
use acropolis_common::{BlockHash, Point};
use serde::{Deserialize, Deserializer};
use std::fs;
use std::path::{Path, PathBuf};
use thiserror::Error;

#[derive(Debug, Error)]
pub enum NonceContextError {
    #[error("Failed to read {0}: {1}")]
    ReadFile(PathBuf, std::io::Error),

    #[error("Failed to parse {0}: {1}")]
    Parse(PathBuf, serde_json::Error),
}

fn deserialize_nonce<'de, D>(deserializer: D) -> Result<Nonce, D::Error>
where
    D: Deserializer<'de>,
{
    let hash: BlockHash = Deserialize::deserialize(deserializer)?;
    Ok(Nonce::from(hash))
}

fn deserialize_point<'de, D>(deserializer: D) -> Result<Point, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    s.split_once('.')
        .and_then(|(slot_str, hash_str)| {
            Some(Point::Specific {
                slot: slot_str.parse().ok()?,
                hash: hash_str.parse().ok()?,
            })
        })
        .ok_or_else(|| serde::de::Error::custom("invalid point format"))
}

#[derive(Debug, Deserialize)]
pub struct NonceContext {
    #[serde(deserialize_with = "deserialize_point")]
    pub at: Point,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub active: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub candidate: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub evolving: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub tail: Nonce,
}

impl NonceContext {
    pub fn path(network_dir: &Path) -> PathBuf {
        network_dir.join("nonces.json")
    }

    pub fn load(network_dir: &Path) -> Result<Self, NonceContextError> {
        let path = Self::path(network_dir);
        let content =
            fs::read_to_string(&path).map_err(|e| NonceContextError::ReadFile(path.clone(), e))?;
        serde_json::from_str(&content).map_err(|e| NonceContextError::Parse(path, e))
    }

    pub fn into_nonces(self, epoch: u64, lab_hash: BlockHash) -> Nonces {
        Nonces {
            epoch,
            active: self.active,
            evolving: self.evolving,
            candidate: self.candidate,
            lab: Nonce::from(lab_hash),
            prev_lab: self.tail,
        }
    }
}

#[cfg(test)]
mod nonces_tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    const ZERO_HASH: &str = "0000000000000000000000000000000000000000000000000000000000000000";

    fn valid_json_with_point(point: &str) -> String {
        format!(
            r#"{{
                "at": "{point}",
                "active": "{ZERO_HASH}",
                "candidate": "{ZERO_HASH}",
                "evolving": "{ZERO_HASH}",
                "tail": "{ZERO_HASH}"
            }}"#
        )
    }

    #[test]
    fn load_fails_when_file_missing() {
        let temp_dir = TempDir::new().unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::ReadFile(_, _)));
        assert!(err.to_string().contains("nonces.json"));
    }

    #[test]
    fn load_fails_for_invalid_json() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(NonceContext::path(temp_dir.path()), "not valid json {{{").unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::Parse(_, _)));
    }

    #[test]
    fn load_fails_when_missing_required_fields() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(NonceContext::path(temp_dir.path()), r#"{"at": "123.abc"}"#).unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::Parse(_, _)));
    }

    #[test]
    fn load_fails_for_invalid_point_format() {
        let temp_dir = TempDir::new().unwrap();

        let bad_case = format!("not_a_number.{ZERO_HASH}");
        let cases = ["no_dot_separator", bad_case.as_str()];

        for invalid_point in cases {
            fs::write(
                NonceContext::path(temp_dir.path()),
                valid_json_with_point(invalid_point),
            )
            .unwrap();

            let err = NonceContext::load(temp_dir.path()).unwrap_err();
            assert!(matches!(err, NonceContextError::Parse(_, _)));
        }
    }
}
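
And a sketch of the intended consumption of NonceContext (the wrapper is hypothetical; epoch and lab_hash are placeholders a caller would supply, e.g. from the snapshot point):

use std::path::Path;
use acropolis_common::protocol_params::Nonces;
use acropolis_common::BlockHash;

// Hypothetical wrapper: reads {network_dir}/nonces.json and converts it,
// mapping the file's "tail" to prev_lab and deriving lab from lab_hash.
fn bootstrap_nonces(
    network_dir: &Path,
    epoch: u64,
    lab_hash: BlockHash,
) -> anyhow::Result<Nonces> {
    let ctx = NonceContext::load(network_dir)?;
    Ok(ctx.into_nonces(epoch, lab_hash))
}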