diff --git a/Cargo.lock b/Cargo.lock
index ddda2a491b..1cd0c518aa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7534,6 +7534,7 @@ dependencies = [
  "sp-consensus-subspace",
  "sp-runtime",
  "sp-std",
+ "subspace-archiving",
  "subspace-core-primitives",
  "subspace-solving",
  "subspace-verification",
diff --git a/crates/sp-lightclient/Cargo.toml b/crates/sp-lightclient/Cargo.toml
index f9d019330b..ab5fc4c5d4 100644
--- a/crates/sp-lightclient/Cargo.toml
+++ b/crates/sp-lightclient/Cargo.toml
@@ -30,6 +30,7 @@ subspace-verification = { version = "0.1.0", path = "../subspace-verification",
 [dev-dependencies]
 frame-support = { version = "4.0.0-dev", git = "https://github.com/subspace/substrate", rev = "1399afdcd8ab4d7fad149c96f9cadcaf04b94f86" }
 rand = { version = "0.8.5", features = ["min_const_gen"] }
+subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" }
 
 [features]
 default = ["std"]
diff --git a/crates/sp-lightclient/src/lib.rs b/crates/sp-lightclient/src/lib.rs
index 264a174d05..4a96c4f8a8 100644
--- a/crates/sp-lightclient/src/lib.rs
+++ b/crates/sp-lightclient/src/lib.rs
@@ -30,10 +30,15 @@ use sp_consensus_subspace::{FarmerPublicKey, FarmerSignature};
 use sp_runtime::traits::Header as HeaderT;
 use sp_runtime::ArithmeticError;
 use sp_std::cmp::Ordering;
+use sp_std::collections::btree_map::BTreeMap;
 use std::marker::PhantomData;
-use subspace_core_primitives::{PublicKey, Randomness, RewardSignature, Salt};
+use subspace_core_primitives::{
+    PublicKey, Randomness, RewardSignature, Salt, Sha256Hash, MERKLE_NUM_LEAVES, RECORD_SIZE,
+};
 use subspace_solving::{derive_global_challenge, derive_target, REWARD_SIGNING_CONTEXT};
-use subspace_verification::{check_reward_signature, verify_solution, VerifySolutionParams};
+use subspace_verification::{
+    check_reward_signature, verify_solution, PieceCheckParams, VerifySolutionParams,
+};
 
 #[cfg(test)]
 mod tests;
@@ -48,6 +53,12 @@ type SolutionRange = u64;
 /// BlockWeight type for fork choice rules.
 type BlockWeight = u128;
 
+/// Segment index type.
+type SegmentIndex = u64;
+
+/// Records root type.
+type RecordsRoot = Sha256Hash;
+
 /// Chain constants.
 #[derive(Debug, Clone)]
 pub struct ChainConstants<Header: HeaderT> {
@@ -57,6 +68,13 @@ pub struct ChainConstants<Header: HeaderT> {
     /// Genesis digest items at the start of the chain since the genesis block will not have any digests
     /// to verify the Block #1 digests.
     pub genesis_digest_items: NextDigestItems,
+
+    /// Maximum number of pieces in a given plot.
+    pub max_plot_size: u64,
+
+    /// Genesis block records roots to verify Block #1 and other block solutions until Block #1 is finalized.
+    /// Once Block #1 is finalized, the records roots present in Block #1 are stored in the storage.
+    pub genesis_records_roots: BTreeMap<SegmentIndex, RecordsRoot>,
 }
 
 /// HeaderExt describes an extended block chain header at a specific height along with some computed values.
@@ -158,6 +176,15 @@ pub trait Storage<Header: HeaderT> {
     /// Returns the latest finalized header.
     fn finalized_header(&self) -> HeaderExt<Header>;
+
+    /// Stores records roots for fast retrieval by segment index at or below the finalized header.
+    fn store_records_roots(&mut self, records_roots: BTreeMap<SegmentIndex, RecordsRoot>);
+
+    /// Returns the records root for a given segment index.
+    fn records_root(&self, segment_index: SegmentIndex) -> Option<RecordsRoot>;
+
+    /// Returns the stored segment count.
+    fn number_of_segments(&self) -> u64;
 }
 
 /// Error type that holds the current finalized number and the header number we are trying to import.
@@ -174,6 +201,8 @@ pub enum ImportError<Header: HeaderT> {
     HeaderAlreadyImported,
     /// Missing parent header.
     MissingParent(HashOf<Header>),
+    /// Missing header associated with hash.
+    MissingHeader(HashOf<Header>),
     /// Missing ancestor header at the number.
     MissingAncestorHeader(HashOf<Header>, NumberOf<Header>),
     /// Error while extracting digests from header.
@@ -192,6 +221,8 @@ pub enum ImportError<Header: HeaderT> {
     SwitchedToForkBelowArchivingDepth,
     /// Header being imported is below the archiving depth.
     HeaderIsBelowArchivingDepth(HeaderBelowArchivingDepthError<Header>),
+    /// Missing records root for a given segment index.
+    MissingRecordsRoot(SegmentIndex),
 }
 
 impl<Header: HeaderT> From<ArithmeticError> for ImportError<Header> {
@@ -263,6 +294,13 @@ impl<Header: HeaderT, Store: Storage<Header>> HeaderImporter<Header, Store> {
         Self::verify_block_signature(&mut header, &pre_digest.solution.public_key)?;
 
         // verify solution
+        let max_plot_size = self.store.chain_constants().max_plot_size;
+        let segment_index = pre_digest.solution.piece_index / u64::from(MERKLE_NUM_LEAVES);
+        let position = pre_digest.solution.piece_index % u64::from(MERKLE_NUM_LEAVES);
+        let records_root =
+            self.find_records_root_for_segment_index(segment_index, parent_header.header.hash())?;
+        let total_pieces = self.total_pieces(parent_header.header.hash())?;
+
         verify_solution(
             &pre_digest.solution,
             pre_digest.slot.into(),
@@ -270,8 +308,13 @@
                 global_randomness: &global_randomness,
                 solution_range,
                 salt,
-                // TODO(ved): verify POAS once we have access to record root
-                piece_check_params: None,
+                piece_check_params: Some(PieceCheckParams {
+                    records_root,
+                    position,
+                    record_size: RECORD_SIZE,
+                    max_plot_size,
+                    total_pieces,
+                }),
             },
         )
         .map_err(ImportError::InvalidSolution)?;
@@ -296,7 +339,6 @@ impl<Header: HeaderT, Store: Storage<Header>> HeaderImporter<Header, Store> {
             }
         };
 
-        // TODO(ved): extract record roots from the header
         // TODO(ved); extract an equivocations from the header
 
         // store header
@@ -496,6 +538,117 @@ impl<Header: HeaderT, Store: Storage<Header>> HeaderImporter<Header, Store> {
         Ok(())
     }
 
+    /// Returns the total pieces on chain, where chain_tip is the hash of the tip of the chain.
+    /// We count the total segments to calculate the total pieces as follows:
+    /// - Fetch the segment count from the store.
+    /// - Count the segments from each header that is not finalized.
+    fn total_pieces(&self, chain_tip: HashOf<Header>) -> Result<u64, ImportError<Header>> {
+        // fetch the segment count from the store
+        let records_roots_count_till_finalized_header = self.store.number_of_segments();
+
+        let finalized_header = self.store.finalized_header();
+        let mut records_roots_count = records_roots_count_till_finalized_header;
+
+        // special case when Block #1 is not finalized yet: include the genesis segment count
+        if finalized_header.header.number().is_zero() {
+            records_roots_count += self.store.chain_constants().genesis_records_roots.len() as u64;
+        }
+
+        // count the segments present in each header from the chain tip down to the finalized header
+        let mut header = self
+            .store
+            .header(chain_tip)
+            .ok_or(ImportError::MissingHeader(chain_tip))?;
+
+        while header.header.hash() != finalized_header.header.hash() {
+            let digest_items = extract_subspace_digest_items::<
+                _,
+                FarmerPublicKey,
+                FarmerPublicKey,
+                FarmerSignature,
+            >(&header.header)?;
+            records_roots_count += digest_items.records_roots.len() as u64;
+
+            header = self
+                .store
+                .header(*header.header.parent_hash())
+                .ok_or_else(|| ImportError::MissingParent(header.header.hash()))?;
+        }
+
+        Ok(records_roots_count * u64::from(MERKLE_NUM_LEAVES))
+    }
+
+    /// Finds the records root mapped to a segment index in the chain with chain_tip as the tip of the chain.
+    /// We try to find the records root as follows:
+    /// - Find the records root in the store and return it if found.
+    /// - Find the records root in the genesis records roots and return it if found.
+    /// - Find the records root present in the non-finalized headers.
+    fn find_records_root_for_segment_index(
+        &self,
+        segment_index: SegmentIndex,
+        chain_tip: HashOf<Header>,
+    ) -> Result<RecordsRoot, ImportError<Header>> {
+        // check if the records root is already in the store
+        if let Some(records_root) = self.store.records_root(segment_index) {
+            return Ok(records_root);
+        };
+
+        // special case: check the genesis records roots if Block #1 is not finalized yet
+        if let Some(records_root) = self
+            .store
+            .chain_constants()
+            .genesis_records_roots
+            .get(&segment_index)
+        {
+            return Ok(*records_root);
+        }
+
+        // find the records root in the headers that are not finalized yet
+        let finalized_header = self.store.finalized_header();
+        let mut header = self
+            .store
+            .header(chain_tip)
+            .ok_or(ImportError::MissingHeader(chain_tip))?;
+
+        while header.header.hash() != finalized_header.header.hash() {
+            let digest_items = extract_subspace_digest_items::<
+                _,
+                FarmerPublicKey,
+                FarmerPublicKey,
+                FarmerSignature,
+            >(&header.header)?;
+
+            if let Some(records_root) = digest_items.records_roots.get(&segment_index) {
+                return Ok(*records_root);
+            }
+
+            header = self
+                .store
+                .header(*header.header.parent_hash())
+                .ok_or_else(|| ImportError::MissingParent(header.header.hash()))?;
+        }
+
+        Err(ImportError::MissingRecordsRoot(segment_index))
+    }
+
+    /// Stores the finalized header and the records roots present in the header.
+    fn store_finalized_header_and_records_roots(
+        &mut self,
+        header: &Header,
+    ) -> Result<(), ImportError<Header>> {
+        let digests_items =
+            extract_subspace_digest_items::<_, FarmerPublicKey, FarmerPublicKey, FarmerSignature>(
+                header,
+            )?;
+
+        // mark header as finalized
+        self.store.finalize_header(header.hash());
+
+        // store the records roots present in the header digests
+        self.store.store_records_roots(digests_items.records_roots);
+        Ok(())
+    }
+
     /// Finalize the header at K-depth from the best block and prune remaining forks at that number.
     /// We want to finalize the header from the current finalized header until the K-depth number of the best.
     /// 1. In an ideal scenario, the current finalized head is one number less than number to be finalized.
@@ -551,7 +704,7 @@
                 .first()
                 .expect("First item must exist as the len is 1.");
 
-            self.store.finalize_header(header_to_finalize.header.hash());
+            self.store_finalized_header_and_records_roots(&header_to_finalize.header)?
         } else {
             // there are multiple headers at the number to be finalized.
             // find the correct ancestor header of the current best header.
@@ -585,7 +738,7 @@
             }
 
             // mark the header as finalized
-            self.store.finalize_header(header_to_finalize.header.hash())
+            self.store_finalized_header_and_records_roots(&header_to_finalize.header)?
         }
     }
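Aside: the solution verification wired into `import_header` above rests on two invariants — a solution's `piece_index` maps to a segment and a position within that segment by division and remainder against `MERKLE_NUM_LEAVES`, and the chain-wide piece count is always the total segment count times `MERKLE_NUM_LEAVES`, since every archived segment contributes exactly that many pieces. A minimal standalone sketch of that arithmetic; the constant's value here is illustrative, the real one comes from `subspace-core-primitives`:

```rust
// Sketch of the piece-index arithmetic used by `import_header` above.
// 256 is an assumed value for illustration; the real constant is
// `subspace_core_primitives::MERKLE_NUM_LEAVES`.
const MERKLE_NUM_LEAVES: u64 = 256;

/// Segment the piece was archived in and its position within that segment.
fn piece_coordinates(piece_index: u64) -> (u64, u64) {
    (piece_index / MERKLE_NUM_LEAVES, piece_index % MERKLE_NUM_LEAVES)
}

/// Total pieces on chain given a total segment count (finalized segments in
/// the store, plus genesis segments, plus segments announced in unfinalized
/// header digests).
fn total_pieces(segment_count: u64) -> u64 {
    segment_count * MERKLE_NUM_LEAVES
}

fn main() {
    // piece 600 lives at position 88 of segment 2
    assert_eq!(piece_coordinates(600), (2, 88));
    // 3 archived segments => 768 pieces available on chain
    assert_eq!(total_pieces(3), 768);
}
```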
diff --git a/crates/sp-lightclient/src/mock.rs b/crates/sp-lightclient/src/mock.rs
index 3a5439f4cb..77a1809266 100644
--- a/crates/sp-lightclient/src/mock.rs
+++ b/crates/sp-lightclient/src/mock.rs
@@ -1,9 +1,12 @@
-use crate::{BlockWeight, ChainConstants, HashOf, HeaderExt, NumberOf, SolutionRange, Storage};
+use crate::{
+    BlockWeight, ChainConstants, HashOf, HeaderExt, NumberOf, RecordsRoot, SegmentIndex,
+    SolutionRange, Storage,
+};
 use codec::{Decode, Encode};
 use scale_info::TypeInfo;
 use sp_arithmetic::traits::Zero;
 use sp_runtime::traits::{BlakeTwo256, Header as HeaderT};
-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
 
 pub(crate) type Header = sp_runtime::generic::Header<u32, BlakeTwo256>;
 
@@ -14,6 +17,7 @@ struct StorageData {
     number_to_hashes: HashMap<NumberOf<Header>, Vec<HashOf<Header>>>,
     best_header: (NumberOf<Header>, HashOf<Header>),
     finalized_head: Option<(NumberOf<Header>, HashOf<Header>)>,
+    records_roots: BTreeMap<SegmentIndex, RecordsRoot>,
 }
 
 #[derive(Default, Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
@@ -106,6 +110,18 @@ impl Storage<Header> for MockStorage {
                 .unwrap()
         })
     }
+
+    fn store_records_roots(&mut self, mut records_roots: BTreeMap<SegmentIndex, RecordsRoot>) {
+        self.0.records_roots.append(&mut records_roots)
+    }
+
+    fn records_root(&self, segment_index: SegmentIndex) -> Option<RecordsRoot> {
+        self.0.records_roots.get(&segment_index).cloned()
+    }
+
+    fn number_of_segments(&self) -> u64 {
+        self.0.records_roots.len() as u64
+    }
 }
 
 impl MockStorage {
@@ -116,6 +132,7 @@ impl MockStorage {
             number_to_hashes: Default::default(),
             best_header: (Default::default(), Default::default()),
             finalized_head: None,
+            records_roots: Default::default(),
         })
     }
@@ -141,4 +158,13 @@ impl MockStorage {
         header.total_weight = weight;
         self.0.headers.insert(hash, header);
     }
+
+    // hack to store records roots
+    pub(crate) fn store_records_root(
+        &mut self,
+        segment_index: SegmentIndex,
+        records_root: RecordsRoot,
+    ) {
+        self.0.records_roots.insert(segment_index, records_root);
+    }
 }
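Aside: `find_records_root_for_segment_index` above resolves a records root from three sources in strict order — the finalized store (as implemented by `MockStorage` here), the genesis records roots carried in `ChainConstants` (only relevant while Block #1 is unfinalized), and finally the digests of each unfinalized header walked back from the chain tip. A toy model of that precedence, with stand-in types for the crate's own:

```rust
use std::collections::BTreeMap;

type SegmentIndex = u64;
type RecordsRoot = [u8; 32]; // stand-in for Sha256Hash

/// Toy model of the records-root lookup precedence used by the importer:
/// store first, then genesis constants, then unfinalized header digests
/// ordered from chain tip down to the finalized head.
fn find_records_root(
    store: &BTreeMap<SegmentIndex, RecordsRoot>,
    genesis_records_roots: &BTreeMap<SegmentIndex, RecordsRoot>,
    unfinalized_digests: &[BTreeMap<SegmentIndex, RecordsRoot>],
    segment_index: SegmentIndex,
) -> Option<RecordsRoot> {
    store
        .get(&segment_index)
        .or_else(|| genesis_records_roots.get(&segment_index))
        .or_else(|| {
            unfinalized_digests
                .iter()
                .find_map(|digests| digests.get(&segment_index))
        })
        .copied()
}

fn main() {
    let store = BTreeMap::from([(0, [1u8; 32])]);
    let genesis = BTreeMap::from([(0, [9u8; 32])]);
    let pending = vec![BTreeMap::from([(1, [2u8; 32])])];
    // the store wins over genesis for segment 0; segment 1 is found in a
    // pending (unfinalized) header digest; segment 2 is missing entirely
    assert_eq!(find_records_root(&store, &genesis, &pending, 0), Some([1u8; 32]));
    assert_eq!(find_records_root(&store, &genesis, &pending, 1), Some([2u8; 32]));
    assert_eq!(find_records_root(&store, &genesis, &pending, 2), None);
}
```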
diff --git a/crates/sp-lightclient/src/tests.rs b/crates/sp-lightclient/src/tests.rs
index af745060d3..86bffd39e7 100644
--- a/crates/sp-lightclient/src/tests.rs
+++ b/crates/sp-lightclient/src/tests.rs
@@ -1,9 +1,11 @@
 use crate::mock::{Header, MockStorage};
 use crate::{
     ChainConstants, HashOf, HeaderExt, HeaderImporter, ImportError, NextDigestItems, NumberOf,
-    SolutionRange, Storage,
+    RecordsRoot, SegmentIndex, SolutionRange, Storage,
 };
 use frame_support::{assert_err, assert_ok};
+use rand::rngs::StdRng;
+use rand::{Rng, SeedableRng};
 use schnorrkel::Keypair;
 use sp_consensus_subspace::digests::{
     extract_subspace_digest_items, CompatibleDigestItem, PreDigest, SubspaceDigestItems,
@@ -12,10 +14,13 @@ use sp_consensus_subspace::{FarmerPublicKey, FarmerSignature};
 use sp_runtime::app_crypto::UncheckedFrom;
 use sp_runtime::{Digest, DigestItem};
 use std::cmp::Ordering;
-use subspace_core_primitives::{Piece, Randomness, Salt, Solution, Tag, PIECE_SIZE};
+use subspace_archiving::archiver::Archiver;
+use subspace_core_primitives::{
+    Piece, Randomness, Salt, Solution, Tag, PIECE_SIZE, RECORDED_HISTORY_SEGMENT_SIZE, RECORD_SIZE,
+};
 use subspace_solving::{
     create_tag, create_tag_signature, derive_global_challenge, derive_local_challenge,
-    derive_target, REWARD_SIGNING_CONTEXT,
+    derive_target, SubspaceCodec, REWARD_SIGNING_CONTEXT,
 };
 
 fn default_randomness_and_salt() -> (Randomness, Salt) {
@@ -33,6 +38,8 @@ fn default_test_constants() -> ChainConstants<Header> {
             next_solution_range: Default::default(),
             next_salt: salt,
         },
+        max_plot_size: 100 * 1024 * 1024 * 1024 / PIECE_SIZE as u64,
+        genesis_records_roots: Default::default(),
     }
 }
 
@@ -43,8 +50,47 @@ fn derive_solution_range(target: Tag, tag: Tag) -> SolutionRange {
     subspace_core_primitives::bidirectional_distance(&target, &tag) * 2
 }
 
-fn random_piece() -> Piece {
-    rand::random::<[u8; PIECE_SIZE]>().into()
+fn valid_piece(pub_key: schnorrkel::PublicKey) -> (Piece, u64, SegmentIndex, RecordsRoot) {
+    // we don't care about the block data
+    let mut rng = StdRng::seed_from_u64(0);
+    let mut block = vec![0u8; RECORDED_HISTORY_SEGMENT_SIZE as usize];
+    rng.fill(block.as_mut_slice());
+
+    let mut archiver =
+        Archiver::new(RECORD_SIZE as usize, RECORDED_HISTORY_SEGMENT_SIZE as usize).unwrap();
+
+    let archived_segment = archiver
+        .add_block(block, Default::default())
+        .first()
+        .cloned()
+        .unwrap();
+
+    let (position, piece) = archived_segment
+        .pieces
+        .as_pieces()
+        .enumerate()
+        .collect::<Vec<_>>()
+        .first()
+        .cloned()
+        .unwrap();
+
+    assert!(subspace_archiving::archiver::is_piece_valid(
+        piece,
+        archived_segment.root_block.records_root(),
+        position,
+        RECORD_SIZE as usize,
+    ));
+
+    let codec = SubspaceCodec::new(pub_key.as_ref());
+    let mut piece = piece.to_vec();
+    codec.encode(&mut piece, position as u64).unwrap();
+
+    (
+        Piece::try_from(piece.as_slice()).unwrap(),
+        position as u64,
+        archived_segment.root_block.segment_index(),
+        archived_segment.root_block.records_root(),
+    )
 }
 
 fn valid_header_with_default_randomness_and_salt(
@@ -52,7 +98,7 @@
     parent_hash: HashOf<Header>,
     number: NumberOf<Header>,
     slot: u64,
     keypair: &Keypair,
-) -> (Header, SolutionRange) {
+) -> (Header, SolutionRange, SegmentIndex, RecordsRoot) {
     let (randomness, salt) = default_randomness_and_salt();
     valid_header(parent_hash, number, slot, keypair, randomness, salt)
 }
@@ -64,8 +110,8 @@ fn valid_header(
     keypair: &Keypair,
     randomness: Randomness,
     salt: Salt,
-) -> (Header, SolutionRange) {
-    let encoding = random_piece();
+) -> (Header, SolutionRange, SegmentIndex, RecordsRoot) {
+    let (encoding, piece_index, segment_index, records_root) = valid_piece(keypair.public);
     let tag: Tag = create_tag(encoding.as_ref(), salt);
     let global_challenge = derive_global_challenge(&randomness, slot);
     let local_challenge = derive_local_challenge(keypair, global_challenge);
@@ -87,7 +133,7 @@ fn valid_header(
         solution: Solution {
             public_key: FarmerPublicKey::unchecked_from(keypair.public.to_bytes()),
             reward_address: FarmerPublicKey::unchecked_from(keypair.public.to_bytes()),
-            piece_index: 0,
+            piece_index,
             encoding,
             tag_signature: create_tag_signature(keypair, tag),
             local_challenge,
@@ -111,7 +157,7 @@ fn valid_header(
         .logs
         .push(DigestItem::subspace_seal(signature));
 
-    (header, solution_range)
+    (header, solution_range, segment_index, records_root)
 }
 
 fn import_blocks_until(
@@ -123,7 +169,7 @@ fn import_blocks_until(
     let mut parent_hash = Default::default();
     let mut slot = start_slot;
     for block_number in 0..=number {
-        let (header, _solution_range) =
+        let (header, _solution_range, segment_index, records_root) =
             valid_header_with_default_randomness_and_salt(parent_hash, block_number, slot, keypair);
         parent_hash = header.hash();
         slot += 1;
@@ -134,6 +180,7 @@ fn import_blocks_until(
             test_overrides: Default::default(),
         };
         store.store_header(header_ext, true);
+        store.store_records_root(segment_index, records_root)
     }
 
     (parent_hash, slot)
@@ -145,8 +192,9 @@ fn test_header_import_missing_parent() {
     let mut store = MockStorage::new(constants);
     let keypair = Keypair::generate();
     let (_parent_hash, next_slot) = import_blocks_until(&mut store, 0, 0, &keypair);
-    let (header, _) =
+    let (header, _, segment_index, records_root) =
         valid_header_with_default_randomness_and_salt(Default::default(), 1, next_slot, &keypair);
+    store.store_records_root(segment_index, records_root);
     let mut importer = HeaderImporter::new(store);
     assert_err!(
         importer.import_header(header.clone()),
@@ -164,18 +212,21 @@ fn header_import_reorg_at_same_height(new_header_weight: Ordering) {
     let mut importer = HeaderImporter::new(store);
 
     // import block 3
-    let (header, solution_range) =
+    let (header, solution_range, segment_index, records_root) =
         valid_header_with_default_randomness_and_salt(parent_hash, 3, next_slot, &keypair);
     importer
         .store
         .override_solution_range(parent_hash, solution_range);
+    importer
+        .store
+        .store_records_root(segment_index, records_root);
     assert_ok!(importer.import_header(header.clone()));
     let best_header_ext = importer.store.best_header();
    assert_eq!(best_header_ext.header, header);
     let mut best_header = header;
 
     // try an import another fork at 3
-    let (header, solution_range) =
+    let (header, solution_range, segment_index, records_root) =
         valid_header_with_default_randomness_and_salt(parent_hash, 3, next_slot + 1, &keypair);
     let digests: SubspaceDigestItems<FarmerPublicKey, FarmerPublicKey, FarmerSignature> =
         extract_subspace_digest_items(&header).unwrap();
@@ -186,6 +237,9 @@ fn header_import_reorg_at_same_height(new_header_weight: Ordering) {
     importer
         .store
         .override_solution_range(parent_hash, solution_range);
+    importer
+        .store
+        .store_records_root(segment_index, records_root);
     match new_header_weight {
         Ordering::Less => {
             importer
@@ -263,11 +317,15 @@ fn test_header_import_success() {
     let mut slot = next_slot;
     let mut parent_hash = parent_hash;
     for number in 3..=10 {
-        let (header, solution_range) =
+        let (header, solution_range, segment_index, records_root) =
             valid_header_with_default_randomness_and_salt(parent_hash, number, slot, &keypair);
         importer
             .store
             .override_solution_range(parent_hash, solution_range);
+        importer
+            .store
+            .store_records_root(segment_index, records_root);
+
         let res = importer.import_header(header.clone());
         assert_ok!(res);
         // best header should be correct
@@ -297,7 +355,7 @@ fn create_fork_chain_from(
     let mut parent_hash = parent_hash;
     let mut next_slot = slot + 1;
     for number in from..=until {
-        let (header, solution_range) =
+        let (header, solution_range, segment_index, records_root) =
             valid_header_with_default_randomness_and_salt(parent_hash, number, next_slot, keypair);
         let digests: SubspaceDigestItems<FarmerPublicKey, FarmerPublicKey, FarmerSignature> =
             extract_subspace_digest_items(&header).unwrap();
@@ -308,6 +366,9 @@ fn create_fork_chain_from(
         importer
             .store
             .override_solution_range(parent_hash, solution_range);
+        importer
+            .store
+            .store_records_root(segment_index, records_root);
         importer
             .store
             .override_cumulative_weight(best_header_ext.header.hash(), new_weight + 1);
@@ -349,11 +410,14 @@ fn test_finalized_chain_reorg_to_longer_chain() {
     ensure_finalized_heads_have_no_forks(&importer.store, 0);
 
     // add new best header at 5
-    let (header, solution_range) =
+    let (header, solution_range, segment_index, records_root) =
         valid_header_with_default_randomness_and_salt(parent_hash, 5, next_slot, &keypair);
     importer
         .store
         .override_solution_range(parent_hash, solution_range);
+    importer
+        .store
+        .store_records_root(segment_index, records_root);
     let res = importer.import_header(header.clone());
     assert_ok!(res);
     let best_header = importer.store.best_header();
@@ -378,12 +442,13 @@ fn test_finalized_chain_reorg_to_longer_chain() {
     ensure_finalized_heads_have_no_forks(&importer.store, 1);
 
     // import a new head to the fork chain and make it the best.
-    let (header, solution_range) = valid_header_with_default_randomness_and_salt(
-        fork_parent_hash,
-        9,
-        fork_next_slot,
-        &keypair,
-    );
+    let (header, solution_range, segment_index, records_root) =
+        valid_header_with_default_randomness_and_salt(
+            fork_parent_hash,
+            9,
+            fork_next_slot,
+            &keypair,
+        );
     let digests: SubspaceDigestItems<FarmerPublicKey, FarmerPublicKey, FarmerSignature> =
         extract_subspace_digest_items(&header).unwrap();
     let new_weight = HeaderImporter::<Header, MockStorage>::calculate_block_weight(
@@ -393,6 +458,9 @@ fn test_finalized_chain_reorg_to_longer_chain() {
     importer
         .store
         .override_solution_range(fork_parent_hash, solution_range);
+    importer
+        .store
+        .store_records_root(segment_index, records_root);
     importer
         .store
         .override_cumulative_weight(importer.store.best_header().header.hash(), new_weight - 1);
@@ -424,11 +492,14 @@ fn test_reorg_to_heavier_smaller_chain() {
     let mut parent_hash = parent_hash;
     let fork_parent_hash = parent_hash;
     for number in 3..=5 {
-        let (header, solution_range) =
+        let (header, solution_range, segment_index, records_root) =
            valid_header_with_default_randomness_and_salt(parent_hash, number, slot, &keypair);
         importer
             .store
             .override_solution_range(parent_hash, solution_range);
+        importer
+            .store
+            .store_records_root(segment_index, records_root);
         let res = importer.import_header(header.clone());
         assert_ok!(res);
         // best header should be correct
@@ -446,7 +517,7 @@ fn test_reorg_to_heavier_smaller_chain() {
     ensure_finalized_heads_have_no_forks(&importer.store, 1);
 
     // now import a fork header 3 that becomes canonical
-    let (header, solution_range) =
+    let (header, solution_range, segment_index, records_root) =
         valid_header_with_default_randomness_and_salt(fork_parent_hash, 3, next_slot + 1, &keypair);
     let digests: SubspaceDigestItems<FarmerPublicKey, FarmerPublicKey, FarmerSignature> =
         extract_subspace_digest_items(&header).unwrap();
@@ -457,6 +528,9 @@ fn test_reorg_to_heavier_smaller_chain() {
     importer
         .store
         .override_solution_range(fork_parent_hash, solution_range);
+    importer
+        .store
+        .store_records_root(segment_index, records_root);
     importer
         .store
         .override_cumulative_weight(importer.store.best_header().header.hash(), new_weight - 1);
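Aside: the `max_plot_size` used in `default_test_constants` above is a byte budget converted into a piece count — 100 GiB divided by `PIECE_SIZE`. A worked sketch, assuming an illustrative 4096-byte piece (the tests use whatever `subspace_core_primitives::PIECE_SIZE` actually exports):

```rust
// Assumed piece size for illustration only; the real value is
// `subspace_core_primitives::PIECE_SIZE`.
const PIECE_SIZE: u64 = 4096;

fn main() {
    // 100 GiB plot budget expressed in bytes
    let plot_bytes: u64 = 100 * 1024 * 1024 * 1024;
    // max_plot_size is the number of pieces that fit in that budget
    let max_plot_size = plot_bytes / PIECE_SIZE;
    assert_eq!(max_plot_size, 26_214_400);
}
```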