From af32f96f36a32235daf7e3b1d9694af7edcf5f8e Mon Sep 17 00:00:00 2001 From: Hansie Odendaal <39146854+hansieodendaal@users.noreply.github.com> Date: Mon, 26 Jun 2023 08:19:25 +0200 Subject: [PATCH] feat: proof of work audit part 2 (#5495) Description --- - Implemented creation, underflow and overflow protection for `Difficulty` in `difficulty.rs`. - Removed all methods not adhering to these patterns/traits. - Fixed usage in the code to adhere to these. - Consolidated and refactored repetitive code in `difficulty.rs`. Motivation and Context --- Preparation for code audit How Has This Been Tested? --- Added unit tests All unit tests pass What process can a PR reviewer use to test or verify this change? --- Code walkthrough Review the new unit test and related code Breaking Changes --- - [x] None - [ ] Requires data directory on base node to be deleted - [ ] Requires hard fork - [ ] Other - Please specify --- .../tari_base_node/src/grpc/hash_rate.rs | 8 +- .../src/block_template_protocol.rs | 2 +- .../tari_merge_mining_proxy/src/error.rs | 7 +- applications/tari_miner/src/difficulty.rs | 17 +- applications/tari_miner/src/miner.rs | 19 +- .../core/src/blocks/accumulated_data.rs | 12 +- base_layer/core/src/blocks/error.rs | 2 + base_layer/core/src/blocks/genesis_block.rs | 34 +- .../src/chain_storage/blockchain_database.rs | 30 +- .../tests/blockchain_database.rs | 7 +- .../core/src/consensus/consensus_constants.rs | 54 ++-- .../core/src/proof_of_work/difficulty.rs | 290 +++++++++++------- base_layer/core/src/proof_of_work/error.rs | 6 + .../core/src/proof_of_work/lwma_diff.rs | 84 ++--- base_layer/core/src/proof_of_work/mod.rs | 12 +- .../core/src/proof_of_work/monero_rx/error.rs | 4 + .../src/proof_of_work/monero_rx/helpers.rs | 3 +- base_layer/core/src/proof_of_work/sha3_pow.rs | 16 +- .../proof_of_work/target_difficulty_window.rs | 21 +- base_layer/core/src/proto/block.rs | 14 +- .../core/src/test_helpers/block_spec.rs | 4 +- .../core/src/test_helpers/blockchain.rs | 3 +- base_layer/core/src/test_helpers/mod.rs | 11 +- .../transaction_components/wallet_output.rs | 1 + .../core/src/validation/block_body/test.rs | 7 +- base_layer/core/src/validation/error.rs | 4 +- base_layer/core/src/validation/helpers.rs | 2 +- base_layer/core/src/validation/mocks.rs | 6 +- .../chain_storage_tests/chain_storage.rs | 42 +-- .../core/tests/helpers/block_builders.rs | 32 +- base_layer/core/tests/helpers/database.rs | 3 +- .../core/tests/helpers/test_blockchain.rs | 6 +- .../core/tests/tests/block_validation.rs | 35 ++- base_layer/core/tests/tests/mempool.rs | 6 +- .../core/tests/tests/node_comms_interface.rs | 66 +++- base_layer/core/tests/tests/node_service.rs | 23 +- .../core/tests/tests/node_state_machine.rs | 15 +- .../mmr/src/sparse_merkle_tree/proofs.rs | 21 +- base_layer/tari_mining_helper_ffi/src/lib.rs | 37 ++- 39 files changed, 593 insertions(+), 373 deletions(-) diff --git a/applications/tari_base_node/src/grpc/hash_rate.rs b/applications/tari_base_node/src/grpc/hash_rate.rs index f270abc8a3..13cf84254b 100644 --- a/applications/tari_base_node/src/grpc/hash_rate.rs +++ b/applications/tari_base_node/src/grpc/hash_rate.rs @@ -125,13 +125,13 @@ mod test { // we check that the window is not full when we insert less items than the window size for _ in 0..window_size - 1 { - hash_rate_ma.add(0, Difficulty::from(0)); + hash_rate_ma.add(0, Difficulty::min()); assert!(!hash_rate_ma.is_full()); } // from this point onwards, the window should be always full for _ in 0..10 { - hash_rate_ma.add(0, 
Difficulty::from(0)); + hash_rate_ma.add(0, Difficulty::min()); assert!(hash_rate_ma.is_full()); } } @@ -175,7 +175,7 @@ mod test { let window_size = hash_rate_ma.window_size; for _ in 0..window_size { - hash_rate_ma.add(0, Difficulty::from(u64::MAX)); + hash_rate_ma.add(0, Difficulty::max()); } } @@ -193,7 +193,7 @@ mod test { difficulty: u64, expected_hash_rate: u64, ) { - moving_average.add(height, Difficulty::from(difficulty)); + moving_average.add(height, Difficulty::from_u64(difficulty).unwrap()); assert_eq!(moving_average.average(), expected_hash_rate); } } diff --git a/applications/tari_merge_mining_proxy/src/block_template_protocol.rs b/applications/tari_merge_mining_proxy/src/block_template_protocol.rs index e0d95fcab3..363525ddc8 100644 --- a/applications/tari_merge_mining_proxy/src/block_template_protocol.rs +++ b/applications/tari_merge_mining_proxy/src/block_template_protocol.rs @@ -253,7 +253,7 @@ impl BlockTemplateProtocol<'_> { ); Ok(FinalBlockTemplateData { template: block_template_data, - target_difficulty: mining_difficulty.into(), + target_difficulty: Difficulty::from_u64(mining_difficulty)?, blockhashing_blob, blocktemplate_blob, merge_mining_hash: tari_block.merge_mining_hash, diff --git a/applications/tari_merge_mining_proxy/src/error.rs b/applications/tari_merge_mining_proxy/src/error.rs index 0ea0579fb6..ce11bc3e0c 100644 --- a/applications/tari_merge_mining_proxy/src/error.rs +++ b/applications/tari_merge_mining_proxy/src/error.rs @@ -28,7 +28,10 @@ use hex::FromHexError; use hyper::header::InvalidHeaderValue; // use tari_app_grpc::authentication::BasicAuthError; use tari_common::{ConfigError, ConfigurationError}; -use tari_core::{proof_of_work::monero_rx::MergeMineError, transactions::CoinbaseBuildError}; +use tari_core::{ + proof_of_work::{monero_rx::MergeMineError, DifficultyError}, + transactions::CoinbaseBuildError, +}; use tari_wallet_grpc_client::BasicAuthError; use thiserror::Error; use tonic::{codegen::http::uri::InvalidUri, transport}; @@ -92,6 +95,8 @@ pub enum MmProxyError { ConversionError(String), #[error("No reachable servers in configuration")] ServersUnavailable, + #[error("Invalid difficulty: {0}")] + DifficultyError(#[from] DifficultyError), } impl From for MmProxyError { diff --git a/applications/tari_miner/src/difficulty.rs b/applications/tari_miner/src/difficulty.rs index 48f674e08c..e1351af4df 100644 --- a/applications/tari_miner/src/difficulty.rs +++ b/applications/tari_miner/src/difficulty.rs @@ -23,7 +23,10 @@ use std::convert::TryInto; use tari_app_grpc::tari_rpc::BlockHeader as grpc_header; -use tari_core::{blocks::BlockHeader, proof_of_work::sha3x_difficulty}; +use tari_core::{ + blocks::BlockHeader, + proof_of_work::{sha3x_difficulty, DifficultyError}, +}; use tari_utilities::epoch_time::EpochTime; use crate::errors::MinerError; @@ -65,9 +68,9 @@ impl BlockHeaderSha3 { } #[inline] - pub fn difficulty(&mut self) -> Difficulty { + pub fn difficulty(&mut self) -> Result { self.hashes = self.hashes.saturating_add(1); - sha3x_difficulty(&self.header).into() + Ok(sha3x_difficulty(&self.header)?.as_u64()) } #[allow(clippy::cast_possible_wrap)] @@ -112,8 +115,8 @@ pub mod test { let mut hasher = BlockHeaderSha3::new(header).unwrap(); for _ in 0..1000 { assert_eq!( - hasher.difficulty(), - core_sha3_difficulty(&core_header).as_u64(), + hasher.difficulty().unwrap(), + core_sha3_difficulty(&core_header).unwrap().as_u64(), "with nonces = {}:{}", hasher.header.nonce, core_header.nonce @@ -132,8 +135,8 @@ pub mod test { let mut timestamp = 
core_header.timestamp; for _ in 0..1000 { assert_eq!( - hasher.difficulty(), - core_sha3_difficulty(&core_header).as_u64(), + hasher.difficulty().unwrap(), + core_sha3_difficulty(&core_header).unwrap().as_u64(), "with timestamp = {}", timestamp ); diff --git a/applications/tari_miner/src/miner.rs b/applications/tari_miner/src/miner.rs index 39d94013a4..8d6eaa121b 100644 --- a/applications/tari_miner/src/miner.rs +++ b/applications/tari_miner/src/miner.rs @@ -21,6 +21,7 @@ // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // use std::{ + panic::panic_any, pin::Pin, task::{Context, Poll, Waker}, thread, @@ -178,13 +179,27 @@ pub fn mining_task( share_mode: bool, ) { let start = Instant::now(); - let mut hasher = BlockHeaderSha3::new(header).unwrap(); + let mut hasher = match BlockHeaderSha3::new(header) { + Ok(hasher) => hasher, + Err(err) => { + let err = format!("Miner {} failed to create hasher: {:?}", miner, err); + error!(target: LOG_TARGET, "{}", err); + panic_any(err); + }, + }; hasher.random_nonce(); // We're mining over here! trace!(target: LOG_TARGET, "Mining thread {} started", miner); // Mining work loop { - let difficulty = hasher.difficulty(); + let difficulty = match hasher.difficulty() { + Ok(difficulty) => difficulty, + Err(err) => { + let err = format!("Miner {} failed to calculate difficulty: {:?}", miner, err); + error!(target: LOG_TARGET, "{}", err); + panic_any(err); + }, + }; if difficulty >= target_difficulty { debug!( target: LOG_TARGET, diff --git a/base_layer/core/src/blocks/accumulated_data.rs b/base_layer/core/src/blocks/accumulated_data.rs index b687a45843..3c90d14720 100644 --- a/base_layer/core/src/blocks/accumulated_data.rs +++ b/base_layer/core/src/blocks/accumulated_data.rs @@ -43,7 +43,7 @@ use tari_utilities::hex::Hex; use crate::{ blocks::{error::BlockError, Block, BlockHeader}, - proof_of_work::{AchievedTargetDifficulty, Difficulty, PowAlgorithm}, + proof_of_work::{difficulty::CheckedAdd, AchievedTargetDifficulty, Difficulty, PowAlgorithm}, transactions::aggregated_body::AggregateBody, }; @@ -286,12 +286,18 @@ impl BlockHeaderAccumulatedDataBuilder<'_> { let (monero_diff, blake_diff) = match achieved_target.pow_algo() { PowAlgorithm::Monero => ( - previous_accum.accumulated_monero_difficulty + achieved_target.achieved(), + previous_accum + .accumulated_monero_difficulty + .checked_add(achieved_target.achieved()) + .ok_or(BlockError::DifficultyOverflow)?, previous_accum.accumulated_sha_difficulty, ), PowAlgorithm::Sha3 => ( previous_accum.accumulated_monero_difficulty, - previous_accum.accumulated_sha_difficulty + achieved_target.achieved(), + previous_accum + .accumulated_sha_difficulty + .checked_add(achieved_target.achieved()) + .ok_or(BlockError::DifficultyOverflow)?, ), }; diff --git a/base_layer/core/src/blocks/error.rs b/base_layer/core/src/blocks/error.rs index acc9650e23..acf2cfabba 100644 --- a/base_layer/core/src/blocks/error.rs +++ b/base_layer/core/src/blocks/error.rs @@ -30,4 +30,6 @@ pub enum BlockError { HistoricalBlockContainsPrunedTxos, #[error("Chain block invariant error: {0}")] ChainBlockInvariantError(String), + #[error("Adding difficulties overflowed")] + DifficultyOverflow, } diff --git a/base_layer/core/src/blocks/genesis_block.rs b/base_layer/core/src/blocks/genesis_block.rs index fc58f2dc17..30b9b48638 100644 --- a/base_layer/core/src/blocks/genesis_block.rs +++ b/base_layer/core/src/blocks/genesis_block.rs @@ -29,7 +29,7 @@ use tari_crypto::tari_utilities::hex::*; use crate::{ blocks::{block::Block, 
BlockHeader, BlockHeaderAccumulatedData, ChainBlock}, - proof_of_work::{PowAlgorithm, ProofOfWork}, + proof_of_work::{Difficulty, PowAlgorithm, ProofOfWork}, transactions::{aggregated_body::AggregateBody, transaction_components::TransactionOutput}, }; @@ -120,11 +120,11 @@ pub fn get_stagenet_genesis_block() -> ChainBlock { let accumulated_data = BlockHeaderAccumulatedData { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() } @@ -158,11 +158,11 @@ pub fn get_nextnet_genesis_block() -> ChainBlock { let accumulated_data = BlockHeaderAccumulatedData { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() } @@ -204,11 +204,11 @@ pub fn get_igor_genesis_block() -> ChainBlock { let accumulated_data = BlockHeaderAccumulatedData { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() } @@ -246,11 +246,11 @@ pub fn get_esmeralda_genesis_block() -> ChainBlock { let accumulated_data = BlockHeaderAccumulatedData { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() } diff --git a/base_layer/core/src/chain_storage/blockchain_database.rs b/base_layer/core/src/chain_storage/blockchain_database.rs index 24b4b9c707..7ba38ec5ee 100644 --- a/base_layer/core/src/chain_storage/blockchain_database.rs +++ b/base_layer/core/src/chain_storage/blockchain_database.rs @@ -2514,7 +2514,7 @@ mod test { ConsensusConstantsBuilder, ConsensusManager, }, - proof_of_work::lwma_diff::LWMA_MAX_BLOCK_TIME_RATIO, + proof_of_work::{lwma_diff::LWMA_MAX_BLOCK_TIME_RATIO, Difficulty}, test_helpers::{ blockchain::{ create_chained_blocks, @@ -2696,7 +2696,7 @@ mod test { let (_, main_chain) = create_main_chain(&db, &[("A->GB", 1, 120)]).await; let fork_root = main_chain.get("A").unwrap().clone(); - let 
(_, orphan_chain) = create_chained_blocks(&[("B2->GB", 2, 120)], fork_root).await; + let (_, orphan_chain) = create_chained_blocks(&[("B2->GB", 1, 120)], fork_root).await; let mut access = db.db_write_access().unwrap(); let block = orphan_chain.get("B2").unwrap().clone(); @@ -2710,7 +2710,7 @@ mod test { .unwrap(); let fork_tip = access.fetch_orphan_chain_tip_by_hash(block.hash()).unwrap().unwrap(); assert_eq!(fork_tip, block.to_chain_header()); - assert_eq!(fork_tip.accumulated_data().total_accumulated_difficulty, 4); + assert_eq!(fork_tip.accumulated_data().total_accumulated_difficulty, 3); let all_tips = access.fetch_all_orphan_chain_tips().unwrap().len(); assert_eq!(all_tips, 1); @@ -2730,6 +2730,7 @@ mod test { mod handle_possible_reorg { use super::*; + use crate::proof_of_work::Difficulty; #[tokio::test] async fn it_links_many_orphan_branches_to_main_chain() { @@ -2765,7 +2766,11 @@ mod test { } let fork_root = orphan_chain_c.get("6c").unwrap().clone(); - let (_, orphan_chain_d) = create_chained_blocks(block_specs!(["7d->GB", difficulty: 10]), fork_root).await; + let (_, orphan_chain_d) = create_chained_blocks( + block_specs!(["7d->GB", difficulty: Difficulty::from_u64(10).unwrap()]), + fork_root, + ) + .await; let block = orphan_chain_d.get("7d").unwrap().clone(); let result = test.handle_possible_reorg(block.to_arc_block()).unwrap(); @@ -2818,8 +2823,11 @@ mod test { create_main_chain(&test.db, block_specs!(["1a->GB"], ["2a->1a"], ["3a->2a"], ["4a->3a"])).await; let fork_root = main_chain.get("1a").unwrap().clone(); - let (_, orphan_chain_b) = - create_chained_blocks(block_specs!(["2b->GB", height: 10, difficulty: 10]), fork_root).await; + let (_, orphan_chain_b) = create_chained_blocks( + block_specs!(["2b->GB", height: 10, difficulty: Difficulty::from_u64(10).unwrap()]), + fork_root, + ) + .await; let block = orphan_chain_b.get("2b").unwrap().clone(); let err = test.handle_possible_reorg(block.to_arc_block()).unwrap_err(); @@ -2829,7 +2837,11 @@ mod test { #[tokio::test] async fn it_allows_orphan_blocks_with_any_height() { let test = TestHarness::setup(); - let (_, main_chain) = create_main_chain(&test.db, block_specs!(["1a->GB", difficulty: 2])).await; + let (_, main_chain) = create_main_chain( + &test.db, + block_specs!(["1a->GB", difficulty: Difficulty::from_u64(2).unwrap()]), + ) + .await; let fork_root = main_chain.get("GB").unwrap().clone(); let (_, orphan_chain_b) = @@ -3346,8 +3358,8 @@ mod test { .clear_proof_of_work() .add_proof_of_work(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 120 * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: 1.into(), - max_difficulty: 100.into(), + min_difficulty: Difficulty::min(), + max_difficulty: Difficulty::from_u64(100).expect("valid difficulty"), target_time: 120, }) .build(), diff --git a/base_layer/core/src/chain_storage/tests/blockchain_database.rs b/base_layer/core/src/chain_storage/tests/blockchain_database.rs index bfe42b67de..39dd24a2d6 100644 --- a/base_layer/core/src/chain_storage/tests/blockchain_database.rs +++ b/base_layer/core/src/chain_storage/tests/blockchain_database.rs @@ -582,7 +582,12 @@ mod clear_all_pending_headers { let accum = BlockHeaderAccumulatedData::builder(&prev_accum) .with_hash(header.hash()) .with_achieved_target_difficulty( - AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, 0.into(), 0.into()).unwrap(), + AchievedTargetDifficulty::try_construct( + PowAlgorithm::Sha3, + Difficulty::min(), + Difficulty::min(), + ) + .unwrap(), ) .with_total_kernel_offset(Default::default()) 
.build() diff --git a/base_layer/core/src/consensus/consensus_constants.rs b/base_layer/core/src/consensus/consensus_constants.rs index 6c37d94b71..c35ff0e28f 100644 --- a/base_layer/core/src/consensus/consensus_constants.rs +++ b/base_layer/core/src/consensus/consensus_constants.rs @@ -264,7 +264,7 @@ impl ConsensusConstants { pub fn min_pow_difficulty(&self, pow_algo: PowAlgorithm) -> Difficulty { match self.proof_of_work.get(&pow_algo) { Some(v) => v.min_difficulty, - _ => 0.into(), + _ => Difficulty::min(), } } @@ -276,7 +276,7 @@ impl ConsensusConstants { pub fn max_pow_difficulty(&self, pow_algo: PowAlgorithm) -> Difficulty { match self.proof_of_work.get(&pow_algo) { Some(v) => v.max_difficulty, - _ => 0.into(), + _ => Difficulty::min(), } } @@ -350,14 +350,14 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 1800, - min_difficulty: 1.into(), - max_difficulty: 1.into(), + min_difficulty: Difficulty::min(), + max_difficulty: Difficulty::min(), target_time: 300, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 1200, - min_difficulty: 1.into(), - max_difficulty: 1.into(), + min_difficulty: Difficulty::min(), + max_difficulty: Difficulty::min(), target_time: 200, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); @@ -408,14 +408,16 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: sha3_target_time * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: (sha3_target_time * 67_000).into(), // (target_time x 200_000/3) ... for easy testing - max_difficulty: u64::MAX.into(), + // (target_time x 200_000/3) ... for easy testing + min_difficulty: Difficulty::from_u64(sha3_target_time * 67_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: sha3_target_time, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: monero_target_time * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: (monero_target_time * 100).into(), // (target_time x 300/3) ... for easy testing - max_difficulty: u64::MAX.into(), + // (target_time x 300/3) ... 
for easy testing + min_difficulty: Difficulty::from_u64(monero_target_time * 100).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: monero_target_time, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); @@ -473,14 +475,14 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 1800, - min_difficulty: 60_000_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 1200, - min_difficulty: 60_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 200, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); @@ -528,14 +530,14 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 1800, - min_difficulty: 60_000_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 1200, - min_difficulty: 60_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 200, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); @@ -577,14 +579,14 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 1800, - min_difficulty: 60_000_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 1200, - min_difficulty: 60_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 200, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); @@ -627,14 +629,14 @@ impl ConsensusConstants { let mut algos = HashMap::new(); algos.insert(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 1800, - min_difficulty: 60_000_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }); algos.insert(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 1200, - min_difficulty: 60_000.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(60_000).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 200, }); let (input_version_range, output_version_range, kernel_version_range) = version_zero(); diff --git a/base_layer/core/src/proof_of_work/difficulty.rs b/base_layer/core/src/proof_of_work/difficulty.rs index b13e229c15..4f19cff2f0 100644 --- a/base_layer/core/src/proof_of_work/difficulty.rs +++ b/base_layer/core/src/proof_of_work/difficulty.rs @@ -20,14 +20,16 @@ // WHETHER IN CONTRACT, STRICT 
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::{fmt, ops::Div}; +use std::fmt; -use newtype_ops::newtype_ops; use num_format::{Locale, ToFormattedString}; use serde::{Deserialize, Serialize}; use tari_utilities::epoch_time::EpochTime; -use crate::proof_of_work::error::DifficultyAdjustmentError; +use crate::{ + proof_of_work::{error::DifficultyError, DifficultyAdjustmentError}, + U256, +}; /// Minimum difficulty, enforced in diff retargeting /// avoids getting stuck when trying to increase difficulty subject to dampening @@ -39,8 +41,16 @@ pub struct Difficulty(u64); impl Difficulty { /// A const constructor for Difficulty - pub const fn from_u64(d: u64) -> Self { - Self(d) + pub const fn from_u64(d: u64) -> Result { + if d < MIN_DIFFICULTY { + return Err(DifficultyError::InvalidDifficulty); + } + Ok(Self(d)) + } + + /// Return the difficulty as a u64 + pub fn as_u64(self) -> u64 { + self.0 } /// Difficulty of MIN_DIFFICULTY @@ -48,50 +58,86 @@ impl Difficulty { Difficulty(MIN_DIFFICULTY) } - /// Return the difficulty as a u64 - pub fn as_u64(self) -> u64 { - self.0 + /// Maximum Difficulty + pub const fn max() -> Difficulty { + Difficulty(u64::MAX) + } + + /// Helper function to provide the difficulty of the hash assuming the hash is big_endian + pub fn big_endian_difficulty(hash: &[u8]) -> Result { + let scalar = U256::from_big_endian(hash); // Big endian so the hash has leading zeroes + Difficulty::u256_scalar_to_difficulty(scalar) + } + + /// Helper function to provide the difficulty of the hash assuming the hash is little_endian + pub fn little_endian_difficulty(hash: &[u8]) -> Result { + let scalar = U256::from_little_endian(hash); // Little endian so the hash has trailing zeroes + Difficulty::u256_scalar_to_difficulty(scalar) } - /// Subtract difficulty without overflowing - pub fn checked_sub(self, other: Difficulty) -> Option { - self.0.checked_sub(other.0).map(Difficulty) + fn u256_scalar_to_difficulty(scalar: U256) -> Result { + let result = U256::MAX / scalar; + let result = result.min(u64::MAX.into()); + Difficulty::from_u64(result.low_u64()) } } +/// These traits should not be implemented for `Difficulty`: +/// - `Add for Difficulty` "`+` must not be used, use `checked_add(value)` instead; to prevent overflow +/// - `Sub for Difficulty` `-` must not be used, use `checked_sub(value)` instead; to prevent underflow +/// - `Mul for Difficulty` `*` must not be used at all; difficulties should only be added to or subtracted from +/// - `Div for Difficulty` `/` must not be used at all; difficulties should only be added to or subtracted from +/// - `From for Difficulty` `Difficulty::from` must not be used, use `from_u64(value)` instead; to prevent +/// assignment `< MIN_DIFFICULTY` + impl Default for Difficulty { fn default() -> Self { Difficulty::min() } } -// You can only add or subtract Difficulty from Difficulty -newtype_ops! { [Difficulty] {add sub} {:=} Self Self } -newtype_ops! { [Difficulty] {add sub} {:=} &Self &Self } -newtype_ops! { [Difficulty] {add sub} {:=} Self &Self } - -// Multiplication and division of difficulty by scalar is Difficulty -newtype_ops! { [Difficulty] {mul div rem} {:=} Self u64 } +/// This trait is used to add a type to `CheckedAdd`, which greatly simplifies usage in the code. +/// It is implemented for `Difficulty` and `u64`. 
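Editor's sketch, not part of this patch: the intended call pattern for the fallible constructor and the checked arithmetic defined in this file, with the `CheckedAdd`/`CheckedSub` traits in scope, mirroring the unit tests further down (crate-internal visibility of this module is assumed).
    let d = Difficulty::from_u64(1_000).unwrap();          // construction fails below MIN_DIFFICULTY
    let sum = d.checked_add(8_000).unwrap();               // Difficulty of 9_000
    assert!(Difficulty::max().checked_add(1).is_none());   // overflow is refused rather than wrapped
    assert!(d.checked_sub(Difficulty::max()).is_none());   // underflow is refused as well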
+pub trait CheckedAdd { + fn checked_add(&self, other: T) -> Option + where Self: Sized; +} -// Division of difficulty by difficulty is a difficulty ratio (scalar) (newtype_ops doesn't handle this case) -impl Div for Difficulty { - type Output = u64; +impl CheckedAdd for Difficulty { + fn checked_add(&self, other: Difficulty) -> Option { + self.0.checked_add(other.0).map(Difficulty) + } +} - fn div(self, rhs: Self) -> Self::Output { - self.0 / rhs.0 +impl CheckedAdd for Difficulty { + fn checked_add(&self, other: u64) -> Option { + self.checked_add(Difficulty(other)) } } -impl fmt::Display for Difficulty { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let formatted = self.0.to_formatted_string(&Locale::en); - write!(f, "{}", formatted) +/// This trait is used to add a type to `CheckedSub`, which greatly simplifies usage in the code. +/// It is implemented for `Difficulty` and `u64`. +pub trait CheckedSub { + fn checked_sub(&self, other: T) -> Option + where Self: Sized; +} + +impl CheckedSub for Difficulty { + fn checked_sub(&self, other: Difficulty) -> Option { + if let Some(val) = self.0.checked_sub(other.0) { + if val < MIN_DIFFICULTY { + return None; + } + Some(Difficulty(val)) + } else { + None + } } } -impl From for Difficulty { - fn from(value: u64) -> Self { - Difficulty(value) +impl CheckedSub for Difficulty { + fn checked_sub(&self, other: u64) -> Option { + self.checked_sub(Difficulty(other)) } } @@ -101,6 +147,13 @@ impl From for u64 { } } +impl fmt::Display for Difficulty { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let formatted = self.0.to_formatted_string(&Locale::en); + write!(f, "{}", formatted) + } +} + /// General difficulty adjustment algorithm trait. The key method is `get_difficulty`, which returns the target /// difficulty given a set of historical achieved difficulties; supplied through the `add` method. 
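Editor's sketch, not part of this patch: the add/get_difficulty flow this trait describes, as exercised by the `LinearWeightedMovingAverage` tests in lwma_diff.rs later in this patch (timestamps and difficulties are illustrative).
    let mut lwma = LinearWeightedMovingAverage::new(5, 60, 60 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap();
    let _ = lwma.add(60.into(), Difficulty::from_u64(100).unwrap());    // get_difficulty() is still None here
    let _ = lwma.add(120.into(), Difficulty::from_u64(100).unwrap());
    assert_eq!(lwma.get_difficulty().unwrap(), Difficulty::from_u64(100).unwrap());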
pub trait DifficultyAdjustment { @@ -116,103 +169,120 @@ pub trait DifficultyAdjustment { fn get_difficulty(&self) -> Option; } -#[cfg(feature = "base_node")] -pub mod util { - use super::*; - use crate::U256; +#[cfg(test)] +mod test { + use crate::{ + proof_of_work::{ + difficulty::{CheckedAdd, CheckedSub, MIN_DIFFICULTY}, + Difficulty, + }, + U256, + }; - /// This will provide the difficulty of the hash assuming the hash is big_endian - pub(crate) fn big_endian_difficulty(hash: &[u8]) -> Difficulty { - let scalar = U256::from_big_endian(hash); // Big endian so the hash has leading zeroes - let result = U256::MAX / scalar; - let result = result.min(u64::MAX.into()); - result.low_u64().into() + #[test] + fn add_difficulty() { + assert_eq!( + Difficulty::from_u64(1_000).unwrap().checked_add(8_000).unwrap(), + Difficulty::from_u64(9_000).unwrap() + ); + assert_eq!( + Difficulty::default().checked_add(42).unwrap(), + Difficulty::from_u64(MIN_DIFFICULTY + 42).unwrap() + ); + assert_eq!( + Difficulty::from_u64(15).unwrap().checked_add(5).unwrap(), + Difficulty::from_u64(20).unwrap() + ); } - /// This will provide the difficulty of the hash assuming the hash is little_endian - pub(crate) fn little_endian_difficulty(hash: &[u8]) -> Difficulty { - let scalar = U256::from_little_endian(hash); // Little endian so the hash has trailing zeroes - let result = U256::MAX / scalar; - let result = result.min(u64::MAX.into()); - result.low_u64().into() + #[test] + fn test_format() { + let d = Difficulty::from_u64(1_000_000).unwrap(); + assert_eq!("1,000,000", format!("{}", d)); } - #[cfg(test)] - mod test { - use super::*; - - #[test] - fn be_high_target() { - let target: &[u8] = &[ - 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - ]; - let expected = Difficulty::from(1); - assert_eq!(big_endian_difficulty(target), expected); - } - - #[test] - fn be_max_difficulty() { - let target = U256::MAX / U256::from(u64::MAX); - let mut bytes = [0u8; 32]; - target.to_big_endian(&mut bytes); - assert_eq!(big_endian_difficulty(&bytes), Difficulty::from(u64::MAX)); - } - - #[test] - fn be_stop_overflow() { - let target: u64 = 64; - let expected = u64::MAX; - assert_eq!(big_endian_difficulty(&target.to_be_bytes()), Difficulty::from(expected)); + #[test] + fn difficulty_converts_correctly_at_its_limits() { + for d in 0..=MIN_DIFFICULTY + 1 { + if d < MIN_DIFFICULTY { + assert!(Difficulty::from_u64(d).is_err()); + } else { + assert!(Difficulty::from_u64(d).is_ok()); + } } + assert_eq!(Difficulty::min().as_u64(), MIN_DIFFICULTY); + assert_eq!(Difficulty::max().as_u64(), u64::MAX); + } - #[test] - fn le_high_target() { - let target: &[u8] = &[ - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, - 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, - ]; - let expected = Difficulty::from(1); - assert_eq!(little_endian_difficulty(target), expected); - } + #[test] + fn addition_does_not_overflow() { + let d1 = Difficulty::from_u64(100).unwrap(); + assert!(d1.checked_add(1).is_some()); + let d2 = Difficulty::max(); + assert!(d2.checked_add(1).is_none()); + } - #[test] - fn le_max_difficulty() { - let target = U256::MAX / U256::from(u64::MAX); - let mut bytes = [0u8; 32]; - target.to_little_endian(&mut bytes); - assert_eq!(little_endian_difficulty(&bytes), Difficulty::from(u64::MAX)); - } + #[test] + fn subtraction_does_not_underflow() { + let d1 = 
Difficulty::from_u64(100).unwrap(); + assert!(d1.checked_sub(1).is_some()); + let d2 = Difficulty::max(); + assert!(d1.checked_sub(d2).is_none()); + } - #[test] - fn le_stop_overflow() { - let target: u64 = 64; - let expected = u64::MAX; - assert_eq!( - little_endian_difficulty(&target.to_be_bytes()), - Difficulty::from(expected) - ); - } + #[test] + fn be_high_target() { + let target: &[u8] = &[ + 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + ]; + let expected = Difficulty::min(); + assert_eq!(Difficulty::big_endian_difficulty(target).unwrap(), expected); } -} -#[cfg(test)] -mod test { - use crate::proof_of_work::difficulty::Difficulty; + #[test] + fn be_max_difficulty() { + let target = U256::MAX / U256::from(u64::MAX); + let mut bytes = [0u8; 32]; + target.to_big_endian(&mut bytes); + assert_eq!(Difficulty::big_endian_difficulty(&bytes).unwrap(), Difficulty::max()); + } #[test] - fn add_difficulty() { + fn be_stop_overflow() { + let target: u64 = 64; + let expected = u64::MAX; assert_eq!( - Difficulty::from(1_000) + Difficulty::from(8_000), - Difficulty::from(9_000) + Difficulty::big_endian_difficulty(&target.to_be_bytes()).unwrap(), + Difficulty::from_u64(expected).unwrap() ); - assert_eq!(Difficulty::default() + Difficulty::from(42), Difficulty::from(43)); - assert_eq!(Difficulty::from(15) + Difficulty::from(5), Difficulty::from(20)); } #[test] - fn test_format() { - let d = Difficulty::from(1_000_000); - assert_eq!("1,000,000", format!("{}", d)); + fn le_high_target() { + let target: &[u8] = &[ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, + ]; + let expected = Difficulty::min(); + assert_eq!(Difficulty::little_endian_difficulty(target).unwrap(), expected); + } + + #[test] + fn le_max_difficulty() { + let target = U256::MAX / U256::from(u64::MAX); + let mut bytes = [0u8; 32]; + target.to_little_endian(&mut bytes); + assert_eq!(Difficulty::little_endian_difficulty(&bytes).unwrap(), Difficulty::max()); + } + + #[test] + fn le_stop_overflow() { + let target: u64 = 64; + let expected = u64::MAX; + assert_eq!( + Difficulty::little_endian_difficulty(&target.to_be_bytes()).unwrap(), + Difficulty::from_u64(expected).unwrap() + ); } } diff --git a/base_layer/core/src/proof_of_work/error.rs b/base_layer/core/src/proof_of_work/error.rs index 6a0f5d9fed..5b8228423b 100644 --- a/base_layer/core/src/proof_of_work/error.rs +++ b/base_layer/core/src/proof_of_work/error.rs @@ -46,3 +46,9 @@ pub enum DifficultyAdjustmentError { #[error("Other difficulty algorithm error")] Other, } + +#[derive(Debug, Error)] +pub enum DifficultyError { + #[error("Difficulty conversion less than the minimum difficulty")] + InvalidDifficulty, +} diff --git a/base_layer/core/src/proof_of_work/lwma_diff.rs b/base_layer/core/src/proof_of_work/lwma_diff.rs index c63a9df767..54b2ced06b 100644 --- a/base_layer/core/src/proof_of_work/lwma_diff.rs +++ b/base_layer/core/src/proof_of_work/lwma_diff.rs @@ -127,7 +127,11 @@ impl LinearWeightedMovingAverage { target ); trace!(target: LOG_TARGET, "New target difficulty: {}", target); - Some(target.into()) + if target < Difficulty::min().as_u64() { + None + } else { + Some(Difficulty::from_u64(target).expect("Difficulty is valid")) + } } pub fn is_full(&self) -> bool { @@ -189,13 +193,13 @@ mod test { let dif = LinearWeightedMovingAverage::new(0, 
120, 120 * LWMA_MAX_BLOCK_TIME_RATIO); assert!(dif.is_err()); let mut dif = LinearWeightedMovingAverage::new(1, 120, 120 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap(); - dif.add_front(60.into(), 100.into()); + dif.add_front(60.into(), Difficulty::from_u64(100).unwrap()); assert!(!dif.is_full()); assert_eq!(dif.num_samples(), 1); - dif.add_front(60.into(), 100.into()); + dif.add_front(60.into(), Difficulty::from_u64(100).unwrap()); assert_eq!(dif.num_samples(), 2); assert!(dif.is_full()); - dif.add_front(60.into(), 100.into()); + dif.add_front(60.into(), Difficulty::from_u64(100).unwrap()); assert_eq!(dif.num_samples(), 2); assert!(dif.is_full()); } @@ -204,7 +208,7 @@ mod test { fn lwma_negative_solve_times() { let mut dif = LinearWeightedMovingAverage::new(90, 120, 120 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap(); let mut timestamp = 60.into(); - let cum_diff = Difficulty::from(100); + let cum_diff = Difficulty::from_u64(100).unwrap(); let _ = dif.add(timestamp, cum_diff); timestamp = timestamp.increase(60); let _ = dif.add(timestamp, cum_diff); @@ -228,11 +232,11 @@ mod test { #[test] fn lwma_limit_difficulty_change() { let mut dif = LinearWeightedMovingAverage::new(5, 60, 60 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap(); - let _ = dif.add(60.into(), 100.into()); - let _ = dif.add(10_000_000.into(), 100.into()); - assert_eq!(dif.get_difficulty().unwrap(), 16.into()); - let _ = dif.add(20_000_000.into(), 16.into()); - assert_eq!(dif.get_difficulty().unwrap(), 9.into()); + let _ = dif.add(60.into(), Difficulty::from_u64(100).unwrap()); + let _ = dif.add(10_000_000.into(), Difficulty::from_u64(100).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(16).unwrap()); + let _ = dif.add(20_000_000.into(), Difficulty::from_u64(16).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(9).unwrap()); } // Data for 5-period moving average @@ -245,43 +249,43 @@ mod test { #[test] fn lwma_calculate() { let mut dif = LinearWeightedMovingAverage::new(5, 60, 60 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap(); - let _ = dif.add(60.into(), 100.into()); + let _ = dif.add(60.into(), Difficulty::from_u64(100).unwrap()); assert_eq!(dif.get_difficulty(), None); - let _ = dif.add(120.into(), 100.into()); - assert_eq!(dif.get_difficulty().unwrap(), 100.into()); - let _ = dif.add(180.into(), 100.into()); - assert_eq!(dif.get_difficulty().unwrap(), 100.into()); - let _ = dif.add(240.into(), 100.into()); - assert_eq!(dif.get_difficulty().unwrap(), 100.into()); - let _ = dif.add(300.into(), 100.into()); - assert_eq!(dif.get_difficulty().unwrap(), 100.into()); - let _ = dif.add(350.into(), 105.into()); - assert_eq!(dif.get_difficulty().unwrap(), 106.into()); - let _ = dif.add(380.into(), 128.into()); - assert_eq!(dif.get_difficulty().unwrap(), 134.into()); - let _ = dif.add(445.into(), 123.into()); - assert_eq!(dif.get_difficulty().unwrap(), 128.into()); - let _ = dif.add(515.into(), 116.into()); - assert_eq!(dif.get_difficulty().unwrap(), 119.into()); - let _ = dif.add(615.into(), 94.into()); - assert_eq!(dif.get_difficulty().unwrap(), 93.into()); - let _ = dif.add(975.into(), 39.into()); - assert_eq!(dif.get_difficulty().unwrap(), 35.into()); - let _ = dif.add(976.into(), 46.into()); - assert_eq!(dif.get_difficulty().unwrap(), 38.into()); - let _ = dif.add(977.into(), 55.into()); - assert_eq!(dif.get_difficulty().unwrap(), 46.into()); - let _ = dif.add(978.into(), 75.into()); - assert_eq!(dif.get_difficulty().unwrap(), 65.into()); - let _ = dif.add(979.into(), 148.into()); - 
assert_eq!(dif.get_difficulty().unwrap(), 173.into()); + let _ = dif.add(120.into(), Difficulty::from_u64(100).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(100).unwrap()); + let _ = dif.add(180.into(), Difficulty::from_u64(100).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(100).unwrap()); + let _ = dif.add(240.into(), Difficulty::from_u64(100).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(100).unwrap()); + let _ = dif.add(300.into(), Difficulty::from_u64(100).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(100).unwrap()); + let _ = dif.add(350.into(), Difficulty::from_u64(105).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(106).unwrap()); + let _ = dif.add(380.into(), Difficulty::from_u64(128).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(134).unwrap()); + let _ = dif.add(445.into(), Difficulty::from_u64(123).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(128).unwrap()); + let _ = dif.add(515.into(), Difficulty::from_u64(116).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(119).unwrap()); + let _ = dif.add(615.into(), Difficulty::from_u64(94).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(93).unwrap()); + let _ = dif.add(975.into(), Difficulty::from_u64(39).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(35).unwrap()); + let _ = dif.add(976.into(), Difficulty::from_u64(46).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(38).unwrap()); + let _ = dif.add(977.into(), Difficulty::from_u64(55).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(46).unwrap()); + let _ = dif.add(978.into(), Difficulty::from_u64(75).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(65).unwrap()); + let _ = dif.add(979.into(), Difficulty::from_u64(148).unwrap()); + assert_eq!(dif.get_difficulty().unwrap(), Difficulty::from_u64(173).unwrap()); } #[test] fn ensure_calculate_does_not_overflow_with_large_block_window() { let mut dif = LinearWeightedMovingAverage::new(6000, 60, 60 * LWMA_MAX_BLOCK_TIME_RATIO).unwrap(); for _i in 0..6000 { - let _ = dif.add(60.into(), u64::MAX.into()); + let _ = dif.add(60.into(), Difficulty::max()); } // We don't care about the value, we just want to test that get_difficulty does not panic with an overflow. dif.get_difficulty().unwrap(); diff --git a/base_layer/core/src/proof_of_work/mod.rs b/base_layer/core/src/proof_of_work/mod.rs index a368f53c2f..f2745bd43b 100644 --- a/base_layer/core/src/proof_of_work/mod.rs +++ b/base_layer/core/src/proof_of_work/mod.rs @@ -20,32 +20,38 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+/// Crates for proof of work difficulty #[cfg(any(feature = "base_node", feature = "transactions"))] pub(crate) mod difficulty; #[cfg(any(feature = "base_node", feature = "transactions"))] pub use difficulty::{Difficulty, DifficultyAdjustment}; +/// Crates for proof of work error #[cfg(any(feature = "base_node", feature = "transactions"))] mod error; #[cfg(any(feature = "base_node", feature = "transactions"))] -pub use error::{DifficultyAdjustmentError, PowError}; +pub use error::{DifficultyAdjustmentError, DifficultyError, PowError}; +/// Crates for proof of work monero_rx #[cfg(feature = "base_node")] pub mod monero_rx; #[cfg(feature = "base_node")] pub use monero_rx::monero_difficulty; +/// Crate for proof of work itself #[cfg(any(feature = "base_node", feature = "transactions"))] #[allow(clippy::module_inception)] mod proof_of_work; #[cfg(any(feature = "base_node", feature = "transactions"))] pub use proof_of_work::ProofOfWork; +/// Crates for proof of work proof_of_work_algorithm #[cfg(any(feature = "base_node", feature = "transactions"))] mod proof_of_work_algorithm; #[cfg(any(feature = "base_node", feature = "transactions"))] pub use proof_of_work_algorithm::PowAlgorithm; +/// Crates for proof of work sha3_pow #[cfg(feature = "base_node")] mod sha3_pow; #[cfg(feature = "base_node")] @@ -53,15 +59,19 @@ pub use sha3_pow::sha3x_difficulty; #[cfg(all(test, feature = "base_node"))] pub use sha3_pow::test as sha3_test; +/// Crates for proof of work target_difficulty mod target_difficulty; pub use target_difficulty::AchievedTargetDifficulty; +/// Crates for proof of work target_difficulty_window #[cfg(feature = "base_node")] mod target_difficulty_window; #[cfg(feature = "base_node")] pub use target_difficulty_window::TargetDifficultyWindow; +/// Crates for proof of work lwma_diff pub mod lwma_diff; +/// Crates for proof of work randomx_factory #[cfg(feature = "base_node")] pub mod randomx_factory; diff --git a/base_layer/core/src/proof_of_work/monero_rx/error.rs b/base_layer/core/src/proof_of_work/monero_rx/error.rs index 5ed779822f..cf50f3fc16 100644 --- a/base_layer/core/src/proof_of_work/monero_rx/error.rs +++ b/base_layer/core/src/proof_of_work/monero_rx/error.rs @@ -23,6 +23,8 @@ use randomx_rs::RandomXError; use tari_utilities::hex::HexError; +use crate::proof_of_work::DifficultyError; + #[derive(Debug, thiserror::Error)] pub enum MergeMineError { #[error("Serialization error: {0}")] @@ -39,4 +41,6 @@ pub enum MergeMineError { HexError(#[from] HexError), #[error("Monero PoW data did not contain a valid merkle root")] InvalidMerkleRoot, + #[error("Invalid difficulty: {0}")] + DifficultyError(#[from] DifficultyError), } diff --git a/base_layer/core/src/proof_of_work/monero_rx/helpers.rs b/base_layer/core/src/proof_of_work/monero_rx/helpers.rs index 7120c786fc..5547e19fe7 100644 --- a/base_layer/core/src/proof_of_work/monero_rx/helpers.rs +++ b/base_layer/core/src/proof_of_work/monero_rx/helpers.rs @@ -39,7 +39,6 @@ use super::{ use crate::{ blocks::BlockHeader, proof_of_work::{ - difficulty::util::little_endian_difficulty, randomx_factory::{RandomXFactory, RandomXVMInstance}, Difficulty, }, @@ -59,7 +58,7 @@ pub fn monero_difficulty(header: &BlockHeader, randomx_factory: &RandomXFactory) fn get_random_x_difficulty(input: &[u8], vm: &RandomXVMInstance) -> Result<(Difficulty, Vec), MergeMineError> { let hash = vm.calculate_hash(input)?; debug!(target: LOG_TARGET, "RandomX Hash: {:?}", hash); - let difficulty = little_endian_difficulty(&hash); + let difficulty = 
Difficulty::little_endian_difficulty(&hash)?; Ok((difficulty, hash)) } diff --git a/base_layer/core/src/proof_of_work/sha3_pow.rs b/base_layer/core/src/proof_of_work/sha3_pow.rs index 4e70ab652c..bd4014d053 100644 --- a/base_layer/core/src/proof_of_work/sha3_pow.rs +++ b/base_layer/core/src/proof_of_work/sha3_pow.rs @@ -24,7 +24,7 @@ use sha3::{Digest, Sha3_256}; use crate::{ blocks::BlockHeader, - proof_of_work::{difficulty::util::big_endian_difficulty, Difficulty}, + proof_of_work::{error::DifficultyError, Difficulty}, }; /// The Tari Sha3X proof-of-work algorithm. This is the reference implementation of Tari's standalone mining @@ -33,8 +33,8 @@ use crate::{ /// In short Sha3X is a triple Keccak Sha3-256 hash of the nonce, mining hash and PoW mode byte. /// Mining using this CPU version of the algorithm is unlikely to be profitable, but is included for reference and /// can be used to mine tXTR on testnets. -pub fn sha3x_difficulty(header: &BlockHeader) -> Difficulty { - sha3x_difficulty_with_hash(header).0 +pub fn sha3x_difficulty(header: &BlockHeader) -> Result { + Ok(sha3x_difficulty_with_hash(header)?.0) } pub fn sha3_hash(header: &BlockHeader) -> Vec { @@ -46,12 +46,12 @@ pub fn sha3_hash(header: &BlockHeader) -> Vec { .to_vec() } -fn sha3x_difficulty_with_hash(header: &BlockHeader) -> (Difficulty, Vec) { +fn sha3x_difficulty_with_hash(header: &BlockHeader) -> Result<(Difficulty, Vec), DifficultyError> { let hash = sha3_hash(header); let hash = Sha3_256::digest(&hash); let hash = Sha3_256::digest(&hash); - let difficulty = big_endian_difficulty(&hash); - (difficulty, hash.to_vec()) + let difficulty = Difficulty::big_endian_difficulty(&hash)?; + Ok((difficulty, hash.to_vec())) } #[cfg(test)] @@ -70,7 +70,7 @@ pub mod test { fn mine_sha3(target_difficulty: Difficulty, header: &mut BlockHeader) -> u64 { header.nonce = 0; // We're mining over here! 
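    // Editor's note, not part of this patch: the sha3x_difficulty call in the loop below reduces to
    // the triple Keccak hash described in this file's doc comment, roughly:
    //     let hash = Sha3_256::digest(&Sha3_256::digest(&sha3_hash(header)));
    //     Difficulty::big_endian_difficulty(&hash)   // U256::MAX / hash-as-big-endian, capped at u64::MAX
    // so the loop simply retries nonces until that quotient reaches `target_difficulty`.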
- while sha3x_difficulty(header) < target_difficulty { + while sha3x_difficulty(header).unwrap() < target_difficulty { header.nonce += 1; } header.nonce @@ -98,6 +98,6 @@ pub mod test { let mut header = get_header(); header.nonce = 6; println!("{:?}", header); - assert_eq!(sha3x_difficulty(&header), Difficulty::from(899)); + assert_eq!(sha3x_difficulty(&header).unwrap(), Difficulty::from_u64(899).unwrap()); } } diff --git a/base_layer/core/src/proof_of_work/target_difficulty_window.rs b/base_layer/core/src/proof_of_work/target_difficulty_window.rs index 8d234e9950..f0c1c47f03 100644 --- a/base_layer/core/src/proof_of_work/target_difficulty_window.rs +++ b/base_layer/core/src/proof_of_work/target_difficulty_window.rs @@ -79,15 +79,18 @@ mod test { #[test] fn it_calculates_the_target_difficulty() { let mut target_difficulties = TargetDifficultyWindow::new(5, 60, 60 * 6).unwrap(); - let mut time = 60.into(); - target_difficulties.add_back(time, 100.into()); - time += 60.into(); - target_difficulties.add_back(time, 100.into()); - time += 60.into(); - target_difficulties.add_back(time, 100.into()); - time += 60.into(); - target_difficulties.add_back(time, 100.into()); + let mut time = Difficulty::from_u64(60).unwrap().as_u64().into(); + target_difficulties.add_back(time, Difficulty::from_u64(100).unwrap()); + time += Difficulty::from_u64(60).unwrap().as_u64().into(); + target_difficulties.add_back(time, Difficulty::from_u64(100).unwrap()); + time += Difficulty::from_u64(60).unwrap().as_u64().into(); + target_difficulties.add_back(time, Difficulty::from_u64(100).unwrap()); + time += Difficulty::from_u64(60).unwrap().as_u64().into(); + target_difficulties.add_back(time, Difficulty::from_u64(100).unwrap()); - assert_eq!(target_difficulties.calculate(1.into(), 400.into()), 100.into()); + assert_eq!( + target_difficulties.calculate(Difficulty::from_u64(1).unwrap(), Difficulty::from_u64(400).unwrap()), + Difficulty::from_u64(100).unwrap() + ); } } diff --git a/base_layer/core/src/proto/block.rs b/base_layer/core/src/proto/block.rs index 893dd72eaf..20ec7ca23e 100644 --- a/base_layer/core/src/proto/block.rs +++ b/base_layer/core/src/proto/block.rs @@ -28,7 +28,7 @@ use tari_utilities::ByteArray; use super::core as proto; use crate::{ blocks::{Block, BlockHeaderAccumulatedData, HistoricalBlock, NewBlock, NewBlockHeaderTemplate, NewBlockTemplate}, - proof_of_work::ProofOfWork, + proof_of_work::{Difficulty, ProofOfWork}, }; //---------------------------------- Block --------------------------------------------// @@ -134,11 +134,13 @@ impl TryFrom for BlockHeaderAccumulatedData { let hash = source.hash.try_into().map_err(|_| "Malformed hash".to_string())?; Ok(Self { hash, - achieved_difficulty: source.achieved_difficulty.into(), + achieved_difficulty: Difficulty::from_u64(source.achieved_difficulty).map_err(|e| e.to_string())?, total_accumulated_difficulty: accumulated_difficulty, - accumulated_monero_difficulty: source.accumulated_monero_difficulty.into(), - accumulated_sha_difficulty: source.accumulated_sha_difficulty.into(), - target_difficulty: source.target_difficulty.into(), + accumulated_monero_difficulty: Difficulty::from_u64(source.accumulated_monero_difficulty) + .map_err(|e| e.to_string())?, + accumulated_sha_difficulty: Difficulty::from_u64(source.accumulated_sha_difficulty) + .map_err(|e| e.to_string())?, + target_difficulty: Difficulty::from_u64(source.target_difficulty).map_err(|e| e.to_string())?, total_kernel_offset: PrivateKey::from_bytes(source.total_kernel_offset.as_slice()) 
.map_err(|err| format!("Invalid value for total_kernel_offset: {}", err))?, }) @@ -164,7 +166,7 @@ impl TryFrom for NewBlockTemplate { Ok(Self { header, body, - target_difficulty: block_template.target_difficulty.into(), + target_difficulty: Difficulty::from_u64(block_template.target_difficulty).map_err(|e| e.to_string())?, reward: block_template.reward.into(), total_fees: block_template.total_fees.into(), }) diff --git a/base_layer/core/src/test_helpers/block_spec.rs b/base_layer/core/src/test_helpers/block_spec.rs index 4488c5a11c..0ae800d7f4 100644 --- a/base_layer/core/src/test_helpers/block_spec.rs +++ b/base_layer/core/src/test_helpers/block_spec.rs @@ -64,7 +64,7 @@ impl<'a> From<&'a [(&'static str, u64, u64)]> for BlockSpecs { BlockSpec::builder() .with_name(name) .with_block_time(*time) - .with_difficulty((*diff).into()) + .with_difficulty(Difficulty::from_u64(*diff).unwrap()) .finish() }) .collect(), @@ -237,7 +237,7 @@ impl Default for BlockSpec { Self { name: "", parent: "", - difficulty: 1.into(), + difficulty: Difficulty::min(), block_time: 120, height_override: None, reward_override: None, diff --git a/base_layer/core/src/test_helpers/blockchain.rs b/base_layer/core/src/test_helpers/blockchain.rs index 3d4eb5fbac..735963678a 100644 --- a/base_layer/core/src/test_helpers/blockchain.rs +++ b/base_layer/core/src/test_helpers/blockchain.rs @@ -463,8 +463,7 @@ fn mine_block(block: Block, prev_block_accum: &BlockHeaderAccumulatedData, diffi let accum = BlockHeaderAccumulatedData::builder(prev_block_accum) .with_hash(block.hash()) .with_achieved_target_difficulty( - AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, (difficulty.as_u64() - 1).into(), difficulty) - .unwrap(), + AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, difficulty, difficulty).unwrap(), ) .with_total_kernel_offset(block.header.total_kernel_offset.clone()) .build() diff --git a/base_layer/core/src/test_helpers/mod.rs b/base_layer/core/src/test_helpers/mod.rs index 14ff8293f1..4d3894fadb 100644 --- a/base_layer/core/src/test_helpers/mod.rs +++ b/base_layer/core/src/test_helpers/mod.rs @@ -37,7 +37,7 @@ use tari_storage::{lmdb_store::LMDBBuilder, LMDBWrapper}; use crate::{ blocks::{Block, BlockHeader, BlockHeaderAccumulatedData, ChainHeader}, consensus::{ConsensusConstants, ConsensusManager}, - proof_of_work::{sha3x_difficulty, AchievedTargetDifficulty, Difficulty}, + proof_of_work::{difficulty::CheckedAdd, sha3x_difficulty, AchievedTargetDifficulty, Difficulty}, transactions::{ key_manager::TransactionKeyManagerBranch, test_helpers::TestKeyManager, @@ -124,7 +124,7 @@ pub fn mine_to_difficulty(mut block: Block, difficulty: Difficulty) -> Result>(data_path: P) -> Arc { } pub fn create_chain_header(header: BlockHeader, prev_accum: &BlockHeaderAccumulatedData) -> ChainHeader { - let achieved_target_diff = AchievedTargetDifficulty::try_construct(header.pow_algo(), 1.into(), 1.into()).unwrap(); + let achieved_target_diff = AchievedTargetDifficulty::try_construct( + header.pow_algo(), + Difficulty::min().checked_add(1).unwrap(), + Difficulty::min().checked_add(1).unwrap(), + ) + .unwrap(); let accumulated_data = BlockHeaderAccumulatedData::builder(prev_accum) .with_hash(header.hash()) .with_achieved_target_difficulty(achieved_target_diff) diff --git a/base_layer/core/src/transactions/transaction_components/wallet_output.rs b/base_layer/core/src/transactions/transaction_components/wallet_output.rs index 14c5126781..a155774b2a 100644 --- 
a/base_layer/core/src/transactions/transaction_components/wallet_output.rs +++ b/base_layer/core/src/transactions/transaction_components/wallet_output.rs @@ -155,6 +155,7 @@ impl WalletOutput { } } + #[allow(clippy::too_many_arguments)] pub async fn new_current_version( value: MicroTari, spending_key_id: TariKeyId, diff --git a/base_layer/core/src/validation/block_body/test.rs b/base_layer/core/src/validation/block_body/test.rs index e86778258b..5304f9e904 100644 --- a/base_layer/core/src/validation/block_body/test.rs +++ b/base_layer/core/src/validation/block_body/test.rs @@ -31,6 +31,7 @@ use crate::{ block_spec, blocks::BlockValidationError, consensus::{ConsensusConstantsBuilder, ConsensusManager}, + proof_of_work::Difficulty, test_helpers::{blockchain::TestBlockchain, BlockSpec}, transactions::{ aggregated_body::AggregateBody, @@ -125,7 +126,7 @@ async fn it_checks_exactly_one_coinbase() { .body .add_output(coinbase_output.to_transaction_output(&blockchain.km).await.unwrap()); block.body.sort(); - let block = blockchain.mine_block("GB", block, 1.into()); + let block = blockchain.mine_block("GB", block, Difficulty::min()); let err = { // `MutexGuard` cannot be held across an `await` point @@ -143,7 +144,7 @@ async fn it_checks_exactly_one_coinbase() { let (block, _) = blockchain .create_unmined_block(block_spec!("A2", parent: "GB", skip_coinbase: true,)) .await; - let block = blockchain.mine_block("GB", block, 1.into()); + let block = blockchain.mine_block("GB", block, Difficulty::min()); let txn = blockchain.db().db_read_access().unwrap(); let err = validator.validate_body(&*txn, block.block()).unwrap_err(); @@ -219,7 +220,7 @@ async fn it_checks_txo_sort_order() { let inputs = block.body.inputs().clone(); let kernels = block.body.kernels().clone(); block.body = AggregateBody::new_sorted_unchecked(inputs, outputs, kernels); - let block = blockchain.mine_block("A", block, 1.into()); + let block = blockchain.mine_block("A", block, Difficulty::min()); let txn = blockchain.db().db_read_access().unwrap(); let err = validator.validate_body(&*txn, block.block()).unwrap_err(); diff --git a/base_layer/core/src/validation/error.rs b/base_layer/core/src/validation/error.rs index d58381d8b1..23d57d7b1f 100644 --- a/base_layer/core/src/validation/error.rs +++ b/base_layer/core/src/validation/error.rs @@ -29,7 +29,7 @@ use crate::{ blocks::{BlockHeaderValidationError, BlockValidationError}, chain_storage::ChainStorageError, covenants::CovenantError, - proof_of_work::{monero_rx::MergeMineError, PowError}, + proof_of_work::{monero_rx::MergeMineError, DifficultyError, PowError}, transactions::{ tari_amount::MicroTari, transaction_components::{OutputType, RangeProofType, TransactionError}, @@ -151,6 +151,8 @@ pub enum ValidationError { InvalidValidatorNodeSignature, #[error("Not enough timestamps provided. 
Expected {expected}, got {actual}")] NotEnoughTimestamps { expected: usize, actual: usize }, + #[error("Invalid difficulty: {0}")] + DifficultyError(#[from] DifficultyError), } // ChainStorageError has a ValidationError variant, so to prevent a cyclic dependency we use a string representation in diff --git a/base_layer/core/src/validation/helpers.rs b/base_layer/core/src/validation/helpers.rs index 0f0a4ac7d0..397c43bc4f 100644 --- a/base_layer/core/src/validation/helpers.rs +++ b/base_layer/core/src/validation/helpers.rs @@ -111,7 +111,7 @@ pub fn check_target_difficulty( ) -> Result { let achieved = match block_header.pow_algo() { PowAlgorithm::Monero => monero_difficulty(block_header, randomx_factory)?, - PowAlgorithm::Sha3 => sha3x_difficulty(block_header), + PowAlgorithm::Sha3 => sha3x_difficulty(block_header)?, }; match AchievedTargetDifficulty::try_construct(block_header.pow_algo(), target, achieved) { diff --git a/base_layer/core/src/validation/mocks.rs b/base_layer/core/src/validation/mocks.rs index 43e445085e..1a9034f08d 100644 --- a/base_layer/core/src/validation/mocks.rs +++ b/base_layer/core/src/validation/mocks.rs @@ -38,7 +38,7 @@ use super::{ use crate::{ blocks::{Block, BlockHeader, ChainBlock}, chain_storage::BlockchainBackend, - proof_of_work::{sha3x_difficulty, AchievedTargetDifficulty, Difficulty, PowAlgorithm}, + proof_of_work::{difficulty::CheckedAdd, sha3x_difficulty, AchievedTargetDifficulty, Difficulty, PowAlgorithm}, transactions::transaction_components::Transaction, validation::{error::ValidationError, FinalHorizonStateValidation}, }; @@ -115,10 +115,10 @@ impl HeaderChainLinkedValidator for MockValidator { _: Option, ) -> Result { if self.is_valid.load(Ordering::SeqCst) { - let achieved = sha3x_difficulty(header); + let achieved = sha3x_difficulty(header)?; let achieved_target = - AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, achieved - Difficulty::from(1), achieved) + AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, achieved, achieved.checked_add(1).unwrap()) .unwrap(); Ok(achieved_target) } else { diff --git a/base_layer/core/tests/chain_storage_tests/chain_storage.rs b/base_layer/core/tests/chain_storage_tests/chain_storage.rs index 7f5a2fd2be..58ca108c53 100644 --- a/base_layer/core/tests/chain_storage_tests/chain_storage.rs +++ b/base_layer/core/tests/chain_storage_tests/chain_storage.rs @@ -160,7 +160,7 @@ fn test_add_multiple_blocks() { &block0.try_into_chain_block().unwrap(), vec![], &consensus_manager, - 1.into(), + Difficulty::min(), ) .unwrap(); let metadata = store.get_chain_metadata().unwrap(); @@ -188,7 +188,7 @@ fn test_checkpoints() { to: vec![MicroTari(5_000), MicroTari(6_000)] ); let (txn, _) = spend_utxos(txn); - let block1 = append_block(&db, &blocks[0], vec![txn], &consensus_manager, 1.into()).unwrap(); + let block1 = append_block(&db, &blocks[0], vec![txn], &consensus_manager, Difficulty::min()).unwrap(); // Get the checkpoint let block_a = db.fetch_block(0, false).unwrap(); assert_eq!(block_a.confirmations(), 2); @@ -265,7 +265,7 @@ fn test_coverage_chain_storage() { &block0.clone().try_into_chain_block().unwrap(), vec![], &rules, - 1.into(), + Difficulty::min(), ) .unwrap(); assert_eq!(store.fetch_all_reorgs().unwrap(), vec![]); @@ -303,10 +303,10 @@ fn test_rewind_past_horizon_height() { ) .unwrap(); - let block1 = append_block(&store, &block0, vec![], &consensus_manager, 1.into()).unwrap(); - let block2 = append_block(&store, &block1, vec![], &consensus_manager, 1.into()).unwrap(); - let block3 = 
append_block(&store, &block2, vec![], &consensus_manager, 1.into()).unwrap(); - let _block4 = append_block(&store, &block3, vec![], &consensus_manager, 1.into()).unwrap(); + let block1 = append_block(&store, &block0, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block2 = append_block(&store, &block1, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block3 = append_block(&store, &block2, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let _block4 = append_block(&store, &block3, vec![], &consensus_manager, Difficulty::min()).unwrap(); let metadata = store.get_chain_metadata().unwrap(); assert_eq!(metadata.height_of_longest_chain(), 4); @@ -1223,7 +1223,7 @@ fn test_handle_reorg_failure_recovery() { let mut block = orphan1_store.prepare_new_block(template).unwrap(); block.header.nonce = OsRng.next_u64(); block.header.height += 1; - find_header_with_achieved_difficulty(&mut block.header, Difficulty::from(2)); + find_header_with_achieved_difficulty(&mut block.header, Difficulty::from_u64(2).unwrap()); block }; @@ -1268,10 +1268,10 @@ fn test_store_and_retrieve_blocks() { &block0.clone().try_into_chain_block().unwrap(), vec![], &rules, - 1.into(), + Difficulty::min(), ) .unwrap(); - let block2 = append_block(&store, &block1, vec![], &rules, 1.into()).unwrap(); + let block2 = append_block(&store, &block1, vec![], &rules, Difficulty::min()).unwrap(); assert_eq!( store.fetch_block(0, true).unwrap().try_into_chain_block().unwrap(), block0.clone().try_into_chain_block().unwrap() @@ -1285,7 +1285,7 @@ fn test_store_and_retrieve_blocks() { block2 ); - let block3 = append_block(&store, &block2, vec![], &rules, 1.into()).unwrap(); + let block3 = append_block(&store, &block2, vec![], &rules, Difficulty::min()).unwrap(); assert_eq!( store.fetch_block(0, true).unwrap().try_into_chain_block().unwrap(), block0.try_into_chain_block().unwrap() @@ -1370,7 +1370,7 @@ fn test_restore_metadata_and_pruning_horizon_update() { ) .unwrap(); - let block1 = append_block(&db, &block0, vec![], &rules, 1.into()).unwrap(); + let block1 = append_block(&db, &block0, vec![], &rules, Difficulty::min()).unwrap(); db.add_block(block1.to_arc_block()).unwrap(); block_hash = *block1.hash(); let metadata = db.get_chain_metadata().unwrap(); @@ -1627,10 +1627,10 @@ fn test_horizon_height_orphan_cleanup() { assert_eq!(store.add_block(orphan3.into()).unwrap(), BlockAddResult::OrphanBlock); assert_eq!(store.db_read_access().unwrap().orphan_count().unwrap(), 3); - let block1 = append_block(&store, &block0, vec![], &consensus_manager, 1.into()).unwrap(); - let block2 = append_block(&store, &block1, vec![], &consensus_manager, 1.into()).unwrap(); - let block3 = append_block(&store, &block2, vec![], &consensus_manager, 1.into()).unwrap(); - let _block4 = append_block(&store, &block3, vec![], &consensus_manager, 1.into()).unwrap(); + let block1 = append_block(&store, &block0, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block2 = append_block(&store, &block1, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block3 = append_block(&store, &block2, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let _block4 = append_block(&store, &block3, vec![], &consensus_manager, Difficulty::min()).unwrap(); // Adding another orphan block will trigger the orphan cleanup as the storage limit was reached assert_eq!( @@ -1962,15 +1962,15 @@ fn pruned_mode_cleanup_and_fetch_block() { DifficultyCalculator::new(consensus_manager.clone(), Default::default()), ) .unwrap(); - let block1 
= append_block(&store, &block0, vec![], &consensus_manager, 1.into()).unwrap(); - let block2 = append_block(&store, &block1, vec![], &consensus_manager, 1.into()).unwrap(); - let block3 = append_block(&store, &block2, vec![], &consensus_manager, 1.into()).unwrap(); + let block1 = append_block(&store, &block0, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block2 = append_block(&store, &block1, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let block3 = append_block(&store, &block2, vec![], &consensus_manager, Difficulty::min()).unwrap(); let metadata = store.get_chain_metadata().unwrap(); assert_eq!(metadata.pruned_height(), 0); - let block4 = append_block(&store, &block3, vec![], &consensus_manager, 1.into()).unwrap(); - let _block5 = append_block(&store, &block4, vec![], &consensus_manager, 1.into()).unwrap(); + let block4 = append_block(&store, &block3, vec![], &consensus_manager, Difficulty::min()).unwrap(); + let _block5 = append_block(&store, &block4, vec![], &consensus_manager, Difficulty::min()).unwrap(); let metadata = store.get_chain_metadata().unwrap(); assert_eq!(metadata.pruned_height(), 2); diff --git a/base_layer/core/tests/helpers/block_builders.rs b/base_layer/core/tests/helpers/block_builders.rs index 2db61e987a..903ba21a14 100644 --- a/base_layer/core/tests/helpers/block_builders.rs +++ b/base_layer/core/tests/helpers/block_builders.rs @@ -132,7 +132,7 @@ async fn genesis_template( .await; let block = NewBlockTemplate::from_block( header.into_builder().with_coinbase_utxo(utxo, kernel).build(), - 1.into(), + Difficulty::min(), coinbase_value, ); (block, output) @@ -213,17 +213,17 @@ pub async fn create_genesis_block_with_coinbase_value( ) -> (ChainBlock, WalletOutput) { let (template, output) = genesis_template(coinbase_value, consensus_constants, key_manager).await; let mut block = update_genesis_block_mmr_roots(template).unwrap(); - find_header_with_achieved_difficulty(&mut block.header, Difficulty::from(1)); + find_header_with_achieved_difficulty(&mut block.header, Difficulty::from_u64(1).unwrap()); let hash = block.hash(); ( ChainBlock::try_construct(block.into(), BlockHeaderAccumulatedData { hash, total_kernel_offset: Default::default(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }) .unwrap(), output, @@ -254,17 +254,17 @@ pub async fn create_genesis_block_with_utxos( template.body.add_output(output); } let mut block = update_genesis_block_mmr_roots(template).unwrap(); - find_header_with_achieved_difficulty(&mut block.header, Difficulty::from(1)); + find_header_with_achieved_difficulty(&mut block.header, Difficulty::from_u64(1).unwrap()); let hash = block.hash(); ( ChainBlock::try_construct(block.into(), BlockHeaderAccumulatedData { hash, total_kernel_offset: Default::default(), - achieved_difficulty: 1.into(), + achieved_difficulty: Difficulty::min(), total_accumulated_difficulty: 1, - accumulated_monero_difficulty: 1.into(), - accumulated_sha_difficulty: 1.into(), - target_difficulty: 1.into(), + accumulated_monero_difficulty: Difficulty::min(), + accumulated_sha_difficulty: Difficulty::min(), + target_difficulty: Difficulty::min(), }) .unwrap(), outputs, @@ -297,7 +297,7 @@ pub async fn chain_block( 
.with_coinbase_utxo(coinbase_utxo, coinbase_kernel) .with_transactions(transactions) .build(), - 1.into(), + Difficulty::min(), reward, ) } @@ -319,7 +319,7 @@ pub fn chain_block_with_coinbase( .with_transactions(transactions) .with_coinbase_utxo(coinbase_utxo, coinbase_kernel) .build(), - 1.into(), + Difficulty::min(), consensus.get_block_reward_at(height), ) } @@ -356,7 +356,7 @@ pub async fn chain_block_with_new_coinbase( .with_transactions(transactions) .with_coinbase_utxo(coinbase_utxo, coinbase_kernel) .build(), - 1.into(), + Difficulty::min(), reward, ); (template, coinbase_output) @@ -479,7 +479,7 @@ pub async fn generate_new_block_with_coinbase( pub fn find_header_with_achieved_difficulty(header: &mut BlockHeader, achieved_difficulty: Difficulty) { let mut num_tries = 0; - while sha3x_difficulty(header) != achieved_difficulty { + while sha3x_difficulty(header).unwrap() != achieved_difficulty { header.nonce += 1; num_tries += 1; if num_tries > 10_000_000 { @@ -571,7 +571,7 @@ pub async fn construct_chained_blocks( let mut prev_block = block0; let mut blocks = Vec::new(); for _i in 0..n { - let block = append_block(db, &prev_block, vec![], consensus, 1.into(), key_manager) + let block = append_block(db, &prev_block, vec![], consensus, Difficulty::min(), key_manager) .await .unwrap(); prev_block = block.clone(); diff --git a/base_layer/core/tests/helpers/database.rs b/base_layer/core/tests/helpers/database.rs index 6047e1cc4e..02fa6fbc9e 100644 --- a/base_layer/core/tests/helpers/database.rs +++ b/base_layer/core/tests/helpers/database.rs @@ -25,6 +25,7 @@ use std::convert::TryInto; use tari_core::{ blocks::{Block, BlockHeader, NewBlockTemplate}, consensus::{emission::Emission, ConsensusManager}, + proof_of_work::Difficulty, transactions::{tari_amount::MicroTari, test_helpers::TestKeyManager, transaction_components::Transaction}, }; @@ -56,7 +57,7 @@ pub async fn create_orphan_block( .with_transactions(transactions) .with_coinbase_utxo(coinbase_utxo, coinbase_kernel) .build(), - 1.into(), + Difficulty::min(), coinbase_value, ); Block::new(template.header.into(), template.body) diff --git a/base_layer/core/tests/helpers/test_blockchain.rs b/base_layer/core/tests/helpers/test_blockchain.rs index c86dfdb78a..865578e610 100644 --- a/base_layer/core/tests/helpers/test_blockchain.rs +++ b/base_layer/core/tests/helpers/test_blockchain.rs @@ -31,6 +31,7 @@ use tari_core::{ blocks::Block, chain_storage::{BlockAddResult, BlockchainDatabase, ChainStorageError}, consensus::ConsensusManager, + proof_of_work::Difficulty, test_helpers::blockchain::TempDatabase, transactions::{test_helpers::TestKeyManager, transaction_components::WalletOutput}, }; @@ -99,7 +100,10 @@ impl TestBlockchain { let mut new_block = self.store.prepare_new_block(template).unwrap(); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, block.difficulty.unwrap_or(1).into()); + find_header_with_achieved_difficulty( + &mut new_block.header, + Difficulty::from_u64(block.difficulty.unwrap_or(1)).unwrap(), + ); (new_block, output) } diff --git a/base_layer/core/tests/tests/block_validation.rs b/base_layer/core/tests/tests/block_validation.rs index 26e8b44a72..649f8e572d 100644 --- a/base_layer/core/tests/tests/block_validation.rs +++ b/base_layer/core/tests/tests/block_validation.rs @@ -36,6 +36,7 @@ use tari_core::{ monero_rx, monero_rx::{FixedByteArray, MoneroPowData}, randomx_factory::RandomXFactory, + Difficulty, PowAlgorithm, }, 
test_helpers::blockchain::{create_store_with_consensus_and_validators, create_test_db}, @@ -96,14 +97,14 @@ async fn test_monero_blocks() { .clear_proof_of_work() .add_proof_of_work(PowAlgorithm::Sha3, PowAlgorithmConstants { max_target_time: 300 * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: 1.into(), - max_difficulty: 1.into(), + min_difficulty: Difficulty::min(), + max_difficulty: Difficulty::min(), target_time: 300, }) .add_proof_of_work(PowAlgorithm::Monero, PowAlgorithmConstants { max_target_time: 200 * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: 1.into(), - max_difficulty: 1.into(), + min_difficulty: Difficulty::min(), + max_difficulty: Difficulty::min(), target_time: 200, }) .with_blockchain_version(0) @@ -403,8 +404,8 @@ async fn test_orphan_body_validation() { // we dont want localnet's 1 difficulty or the full mined difficulty of weather wax but we want some. let sha3_constants = PowAlgorithmConstants { max_target_time: 300 * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: 10.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(10).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }; let consensus_constants = ConsensusConstantsBuilder::new(network) @@ -454,7 +455,7 @@ OutputFeatures::default()), let mut new_block = db.prepare_new_block(template.clone()).unwrap(); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); let achieved_target_diff = header_validator .validate( &*db.db_read_access().unwrap(), @@ -482,7 +483,7 @@ OutputFeatures::default()), let mut new_block = db.prepare_new_block(template.clone()).unwrap(); new_block.header.nonce = OsRng.next_u64(); new_block.header.height = 3; - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); assert!(header_validator .validate( &*db.db_read_access().unwrap(), @@ -525,7 +526,7 @@ OutputFeatures::default()), new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); let achieved_target_diff = header_validator .validate( &*db.db_read_access().unwrap(), @@ -556,7 +557,7 @@ OutputFeatures::default()), new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); let achieved_target_diff = header_validator .validate( &*db.db_read_access().unwrap(), @@ -585,7 +586,7 @@ OutputFeatures::default()), new_block.header.output_mr = FixedHash::zero(); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); let achieved_target_diff = header_validator .validate( &*db.db_read_access().unwrap(), @@ -618,8 +619,8 @@ async fn test_header_validation() { // we dont want localnet's 1 difficulty or the full mined difficulty of weather wax but we 
want some. let sha3_constants = PowAlgorithmConstants { max_target_time: 300 * LWMA_MAX_BLOCK_TIME_RATIO, - min_difficulty: 20.into(), - max_difficulty: u64::MAX.into(), + min_difficulty: Difficulty::from_u64(20).expect("valid difficulty"), + max_difficulty: Difficulty::max(), target_time: 300, }; let consensus_constants = ConsensusConstantsBuilder::new(network) @@ -667,7 +668,7 @@ OutputFeatures::default()), let mut new_block = db.prepare_new_block(template.clone()).unwrap(); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 20.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(20).unwrap()); assert!(header_validator .validate( &*db.db_read_access().unwrap(), @@ -683,7 +684,7 @@ OutputFeatures::default()), new_block.header.nonce = OsRng.next_u64(); // we take the max ftl time and give 10 seconds for mining then check it, it should still be more than the ftl new_block.header.timestamp = rules.consensus_constants(0).ftl().increase(10); - find_header_with_achieved_difficulty(&mut new_block.header, 20.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(20).unwrap()); assert!(header_validator .validate( &*db.db_read_access().unwrap(), @@ -697,7 +698,7 @@ OutputFeatures::default()), // lets break difficulty let mut new_block = db.prepare_new_block(template).unwrap(); new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); let mut result = header_validator .validate( &*db.db_read_access().unwrap(), @@ -712,7 +713,7 @@ OutputFeatures::default()), while counter < 10 && !result { counter += 1; new_block.header.nonce = OsRng.next_u64(); - find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + find_header_with_achieved_difficulty(&mut new_block.header, Difficulty::from_u64(10).unwrap()); result = header_validator .validate( &*db.db_read_access().unwrap(), diff --git a/base_layer/core/tests/tests/mempool.rs b/base_layer/core/tests/tests/mempool.rs index f552d6687e..22ae656c41 100644 --- a/base_layer/core/tests/tests/mempool.rs +++ b/base_layer/core/tests/tests/mempool.rs @@ -1532,7 +1532,7 @@ async fn block_event_and_reorg_event_handling() { .blockchain_db .prepare_new_block(chain_block(&empty_block, vec![tx1], &consensus_manager, &key_manager).await) .unwrap(); - find_header_with_achieved_difficulty(&mut block1.header, Difficulty::from(1)); + find_header_with_achieved_difficulty(&mut block1.header, Difficulty::from_u64(1).unwrap()); // Add Block1 - tx1 will be moved to the ReorgPool. 
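    // Reviewer note (illustrative sketch, not part of the original patch): `Difficulty::from_u64` is the
    // new fallible constructor introduced by this PR. Assuming its underflow protection rejects values
    // below the minimum difficulty of one, the two forms used throughout these tests are equivalent:
    assert!(Difficulty::from_u64(0).is_err());
    assert_eq!(Difficulty::from_u64(1).unwrap(), Difficulty::min());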
assert!(bob.local_nci.submit_block(block1.clone(),).await.is_ok()); async_assert_eventually!( @@ -1558,13 +1558,13 @@ async fn block_event_and_reorg_event_handling() { .blockchain_db .prepare_new_block(chain_block(&block1, vec![tx2a, tx3a], &consensus_manager, &key_manager).await) .unwrap(); - find_header_with_achieved_difficulty(&mut block2a.header, Difficulty::from(1)); + find_header_with_achieved_difficulty(&mut block2a.header, Difficulty::from_u64(1).unwrap()); // Block2b also builds on Block1 but has a stronger PoW let mut block2b = bob .blockchain_db .prepare_new_block(chain_block(&block1, vec![tx2b, tx3b], &consensus_manager, &key_manager).await) .unwrap(); - find_header_with_achieved_difficulty(&mut block2b.header, Difficulty::from(10)); + find_header_with_achieved_difficulty(&mut block2b.header, Difficulty::from_u64(10).unwrap()); // Add Block2a - tx2b and tx3b will be discarded as double spends. assert!(bob.local_nci.submit_block(block2a.clone(),).await.is_ok()); diff --git a/base_layer/core/tests/tests/node_comms_interface.rs b/base_layer/core/tests/tests/node_comms_interface.rs index 7606759a48..d588f10dfc 100644 --- a/base_layer/core/tests/tests/node_comms_interface.rs +++ b/base_layer/core/tests/tests/node_comms_interface.rs @@ -33,6 +33,7 @@ use tari_core::{ consensus::ConsensusManager, covenants::Covenant, mempool::{Mempool, MempoolConfig}, + proof_of_work::Difficulty, test_helpers::{ blockchain::{create_store_with_consensus_and_validators_and_config, create_test_blockchain_db}, create_consensus_rules, @@ -322,21 +323,56 @@ async fn inbound_fetch_blocks_before_horizon_height() { let txn = txn_schema!(from: vec![wallet_output], to: vec![MicroTari(5_000), MicroTari(4_000)]); let (txn, _) = spend_utxos(txn, &key_manager).await; - let block1 = append_block(&store, &block0, vec![txn], &consensus_manager, 1.into(), &key_manager) - .await - .unwrap(); - let block2 = append_block(&store, &block1, vec![], &consensus_manager, 1.into(), &key_manager) - .await - .unwrap(); - let block3 = append_block(&store, &block2, vec![], &consensus_manager, 1.into(), &key_manager) - .await - .unwrap(); - let block4 = append_block(&store, &block3, vec![], &consensus_manager, 1.into(), &key_manager) - .await - .unwrap(); - let _block5 = append_block(&store, &block4, vec![], &consensus_manager, 1.into(), &key_manager) - .await - .unwrap(); + let block1 = append_block( + &store, + &block0, + vec![txn], + &consensus_manager, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); + let block2 = append_block( + &store, + &block1, + vec![], + &consensus_manager, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); + let block3 = append_block( + &store, + &block2, + vec![], + &consensus_manager, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); + let block4 = append_block( + &store, + &block3, + vec![], + &consensus_manager, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); + let _block5 = append_block( + &store, + &block4, + vec![], + &consensus_manager, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); if let Ok(NodeCommsResponse::HistoricalBlocks(received_blocks)) = inbound_nch .handle_request(NodeCommsRequest::FetchMatchingBlocks { diff --git a/base_layer/core/tests/tests/node_service.rs b/base_layer/core/tests/tests/node_service.rs index 56f3a46bff..42831669a8 100644 --- a/base_layer/core/tests/tests/node_service.rs +++ b/base_layer/core/tests/tests/node_service.rs @@ -33,7 +33,7 @@ use tari_core::{ blocks::{ChainBlock, NewBlock}, 
consensus::{ConsensusConstantsBuilder, ConsensusManager, ConsensusManagerBuilder, NetworkConsensus}, mempool::TxStorageResponse, - proof_of_work::{randomx_factory::RandomXFactory, PowAlgorithm}, + proof_of_work::{randomx_factory::RandomXFactory, Difficulty, PowAlgorithm}, transactions::{ tari_amount::{uT, T}, test_helpers::{create_test_core_key_manager_with_memory_db, schema_to_transaction, spend_utxos}, @@ -158,7 +158,7 @@ async fn propagate_and_forward_many_valid_blocks() { &block0, vec![tx01], &rules, - 1.into(), + Difficulty::min(), &key_manager, ) .await @@ -272,9 +272,16 @@ async fn propagate_and_forward_invalid_block_hash() { ) .await; let txs = txs.into_iter().map(|tx| (*tx).clone()).collect(); - let block1 = append_block(&alice_node.blockchain_db, &block0, txs, &rules, 1.into(), &key_manager) - .await - .unwrap(); + let block1 = append_block( + &alice_node.blockchain_db, + &block0, + txs, + &rules, + Difficulty::min(), + &key_manager, + ) + .await + .unwrap(); let block1 = { // Create unknown block hash let mut block = block1.block().clone(); @@ -417,7 +424,7 @@ async fn propagate_and_forward_invalid_block() { &block0, vec![], &rules, - 1.into(), + Difficulty::min(), &key_manager, ) .await @@ -469,10 +476,10 @@ async fn local_get_metadata() { .await; let db = &node.blockchain_db; let block0 = db.fetch_block(0, true).unwrap().try_into_chain_block().unwrap(); - let block1 = append_block(db, &block0, vec![], &consensus_manager, 1.into(), &key_manager) + let block1 = append_block(db, &block0, vec![], &consensus_manager, Difficulty::min(), &key_manager) .await .unwrap(); - let block2 = append_block(db, &block1, vec![], &consensus_manager, 1.into(), &key_manager) + let block2 = append_block(db, &block1, vec![], &consensus_manager, Difficulty::min(), &key_manager) .await .unwrap(); diff --git a/base_layer/core/tests/tests/node_state_machine.rs b/base_layer/core/tests/tests/node_state_machine.rs index da6d0f37d1..301060709f 100644 --- a/base_layer/core/tests/tests/node_state_machine.rs +++ b/base_layer/core/tests/tests/node_state_machine.rs @@ -37,7 +37,7 @@ use tari_core::{ }, consensus::{ConsensusConstantsBuilder, ConsensusManagerBuilder}, mempool::MempoolServiceConfig, - proof_of_work::randomx_factory::RandomXFactory, + proof_of_work::{randomx_factory::RandomXFactory, Difficulty}, test_helpers::blockchain::create_test_blockchain_db, transactions::test_helpers::create_test_core_key_manager_with_memory_db, validation::mocks::MockValidator, @@ -110,9 +110,16 @@ async fn test_listening_lagging() { let mut bob_local_nci = bob_node.local_nci; // Bob Block 1 - no block event - let prev_block = append_block(&bob_db, &prev_block, vec![], &consensus_manager, 3.into(), &key_manager) - .await - .unwrap(); + let prev_block = append_block( + &bob_db, + &prev_block, + vec![], + &consensus_manager, + Difficulty::from_u64(3).unwrap(), + &key_manager, + ) + .await + .unwrap(); // Bob Block 2 - with block event and liveness service metadata update let mut prev_block = bob_db .prepare_new_block(chain_block(prev_block.block(), vec![], &consensus_manager, &key_manager).await) diff --git a/base_layer/mmr/src/sparse_merkle_tree/proofs.rs b/base_layer/mmr/src/sparse_merkle_tree/proofs.rs index 08760a1e63..0130cd6d4c 100644 --- a/base_layer/mmr/src/sparse_merkle_tree/proofs.rs +++ b/base_layer/mmr/src/sparse_merkle_tree/proofs.rs @@ -126,7 +126,7 @@ mod test { let hash = tree.hash().clone(); let proof = tree.build_proof(&key).unwrap(); - assert_eq!(proof.validate_inclusion_proof(&key, &value, &hash), false); + 
assert!(!proof.validate_inclusion_proof(&key, &value, &hash)); assert!(proof.validate_exclusion_proof(&key, &hash)); tree.upsert(key.clone(), value.clone()).unwrap(); @@ -134,11 +134,8 @@ mod test { let proof = tree.build_proof(&key).unwrap(); assert!(proof.validate_inclusion_proof(&key, &value, &hash)); - assert_eq!( - proof.validate_inclusion_proof(&key, &ValueHash::from([1u8; 32]), &hash), - false - ); - assert_eq!(proof.validate_exclusion_proof(&key, &hash), false); + assert!(!proof.validate_inclusion_proof(&key, &ValueHash::from([1u8; 32]), &hash),); + assert!(!proof.validate_exclusion_proof(&key, &hash)); } #[test] @@ -156,22 +153,16 @@ mod test { // Validate the proof with correct key / value assert!(proof.validate_inclusion_proof(&keys[i], &values[i], &root_hash)); // Show that incorrect value for existing key fails - assert_eq!( - proof.validate_inclusion_proof(&keys[i], &values[(i + 3) % n], &root_hash), - false - ); + assert!(!proof.validate_inclusion_proof(&keys[i], &values[(i + 3) % n], &root_hash),); // Exclusion proof fails - assert_eq!(proof.validate_exclusion_proof(&keys[i], &root_hash), false); + assert!(!proof.validate_exclusion_proof(&keys[i], &root_hash)); }); // Test exclusion proof let unused_keys = random_keys(n, 72); (0..n).for_each(|i| { let proof = tree.build_proof(&unused_keys[i]).unwrap(); assert!(proof.validate_exclusion_proof(&unused_keys[i], &root_hash)); - assert_eq!( - proof.validate_inclusion_proof(&unused_keys[i], &values[i], &root_hash), - false - ); + assert!(!proof.validate_inclusion_proof(&unused_keys[i], &values[i], &root_hash),); }); } } diff --git a/base_layer/tari_mining_helper_ffi/src/lib.rs b/base_layer/tari_mining_helper_ffi/src/lib.rs index 164855f5be..e5e528813f 100644 --- a/base_layer/tari_mining_helper_ffi/src/lib.rs +++ b/base_layer/tari_mining_helper_ffi/src/lib.rs @@ -262,7 +262,14 @@ pub unsafe extern "C" fn share_difficulty(header: *mut ByteVector, error_out: *m return 2; }, }; - let difficulty = sha3x_difficulty(&block_header); + let difficulty = match sha3x_difficulty(&block_header) { + Ok(v) => v, + Err(e) => { + error = MiningHelperError::from(InterfaceError::Conversion(e.to_string())).code; + ptr::swap(error_out, &mut error as *mut c_int); + return 3; + }, + }; difficulty.as_u64() } @@ -281,6 +288,7 @@ pub unsafe extern "C" fn share_difficulty(header: *mut ByteVector, error_out: *m /// 0: Valid Block /// 1: Valid Share /// 2: Invalid Share +/// 3: Invalid Difficulty /// `error_out` - Error code returned, 0 means no error /// /// # Safety @@ -321,7 +329,14 @@ pub unsafe extern "C" fn share_validate( ptr::swap(error_out, &mut error as *mut c_int); return 2; } - let difficulty = sha3x_difficulty(&block_header).as_u64(); + let difficulty = match sha3x_difficulty(&block_header) { + Ok(v) => v.as_u64(), + Err(e) => { + error = MiningHelperError::from(InterfaceError::Conversion(e.to_string())).code; + ptr::swap(error_out, &mut error as *mut c_int); + return 3; + }, + }; if difficulty >= template_difficulty { 0 } else if difficulty >= share_difficulty { @@ -345,7 +360,9 @@ mod tests { use super::*; use crate::{inject_nonce, public_key_hex_validate, share_difficulty, share_validate}; - const MIN_DIFFICULTY: Difficulty = Difficulty::from_u64(1000); + fn min_difficulty() -> Difficulty { + Difficulty::from_u64(1000).expect("Failed to create difficulty") + } fn create_test_block() -> Block { get_genesis_block(Network::LocalNet).block().clone() @@ -356,8 +373,8 @@ mod tests { let mut block = create_test_block(); block.header.nonce = 
rand::thread_rng().gen(); for _ in 0..20000 { - if sha3x_difficulty(&block.header) >= difficulty { - return Ok((sha3x_difficulty(&block.header), block.header.nonce)); + if sha3x_difficulty(&block.header).unwrap() >= difficulty { + return Ok((sha3x_difficulty(&block.header).unwrap(), block.header.nonce)); } block.header.nonce += 1; } @@ -370,7 +387,7 @@ mod tests { #[test] fn detect_change_in_consensus_encoding() { const NONCE: u64 = 7661716481620366421; - const DIFFICULTY: Difficulty = Difficulty::from_u64(1467); + let difficulty = Difficulty::from_u64(1467).expect("Failed to create difficulty"); unsafe { let mut error = -1; let error_ptr = &mut error as *mut c_int; @@ -382,10 +399,10 @@ mod tests { inject_nonce(byte_vec, NONCE, error_ptr); assert_eq!(error, 0); let result = share_difficulty(byte_vec, error_ptr); - if result != DIFFICULTY.as_u64() { + if result != difficulty.as_u64() { // Use this to generate new NONCE and DIFFICULTY // Use ONLY if you know encoding has changed - let (difficulty, nonce) = generate_nonce_with_min_difficulty(MIN_DIFFICULTY).unwrap(); + let (difficulty, nonce) = generate_nonce_with_min_difficulty(min_difficulty()).unwrap(); eprintln!("nonce = {:?}", nonce); eprintln!("difficulty = {:?}", difficulty); panic!( @@ -400,7 +417,7 @@ mod tests { #[test] fn check_difficulty() { unsafe { - let (difficulty, nonce) = generate_nonce_with_min_difficulty(MIN_DIFFICULTY).unwrap(); + let (difficulty, nonce) = generate_nonce_with_min_difficulty(min_difficulty()).unwrap(); let mut error = -1; let error_ptr = &mut error as *mut c_int; let block = create_test_block(); @@ -436,7 +453,7 @@ mod tests { #[test] fn check_share() { unsafe { - let (difficulty, nonce) = generate_nonce_with_min_difficulty(MIN_DIFFICULTY).unwrap(); + let (difficulty, nonce) = generate_nonce_with_min_difficulty(min_difficulty()).unwrap(); let mut error = -1; let error_ptr = &mut error as *mut c_int; let block = create_test_block();
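            // Reviewer note (illustrative, not part of the original patch): the former `const MIN_DIFFICULTY`
            // became the `min_difficulty()` helper used above because `Difficulty::from_u64` now returns a
            // `Result` and must be unwrapped at call time rather than in a `const` item.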