chore!: change hash to use consensus encoding (#3820)
Description
---
Change hashing to use consensus encoding: header versions above 2 hash their fields through the ConsensusEncoding trait instead of chaining raw little-endian bytes, a second dibbler consensus-constants entry activates blockchain version 3 at height 23000, the igor genesis header moves to version 3, and ProofOfWork gains a ConsensusEncoding implementation.
Cifko authored Mar 16, 2022
1 parent a74f574 commit 3a2da1d
Showing 4 changed files with 145 additions and 55 deletions.
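The core of the change is in how header fields are fed into the hash: for header versions above 2 they are serialized through the ConsensusEncoding trait instead of being chained into the digest as raw little-endian bytes, while version 2 headers keep the legacy hash so existing blocks remain valid. The sketch below is a minimal, self-contained illustration (not Tari code) of the practical difference for hash-valued fields; the write_varint helper and the assumption that a Vec<u8> is length-prefixed while a fixed [u8; 32] is not are made up for illustration, but they explain why the diff converts Vec-backed hashes with copy_into_fixed_array before encoding.

```rust
use std::io::{self, Write};

/// Minimal varint writer, standing in for whatever integer encoding the real
/// ConsensusEncoding trait uses -- an assumption for illustration only.
fn write_varint<W: Write>(writer: &mut W, mut value: u64) -> io::Result<usize> {
    let mut written = 0;
    loop {
        let mut byte = (value & 0x7f) as u8;
        value >>= 7;
        if value != 0 {
            byte |= 0x80;
        }
        writer.write_all(&[byte])?;
        written += 1;
        if value == 0 {
            return Ok(written);
        }
    }
}

fn main() -> io::Result<()> {
    let hash: Vec<u8> = vec![0u8; 32];

    // Legacy style: raw bytes are chained straight into the digest, no framing.
    let mut legacy_preimage: Vec<u8> = Vec::new();
    legacy_preimage.write_all(&hash)?;

    // Consensus-encoding style (assumed): a Vec<u8> is length-prefixed with a
    // varint, while a fixed [u8; 32] is written as-is. This is why the diff
    // converts Vec-backed hashes with copy_into_fixed_array before hashing.
    let mut vec_preimage: Vec<u8> = Vec::new();
    write_varint(&mut vec_preimage, hash.len() as u64)?;
    vec_preimage.write_all(&hash)?;

    let mut fixed = [0u8; 32];
    fixed.copy_from_slice(&hash);
    let mut fixed_preimage: Vec<u8> = Vec::new();
    fixed_preimage.write_all(&fixed)?;

    assert_eq!(legacy_preimage.len(), 32);
    assert_eq!(vec_preimage.len(), 33); // one length byte plus 32 data bytes
    assert_eq!(fixed_preimage.len(), 32);
    Ok(())
}
```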
95 changes: 72 additions & 23 deletions base_layer/core/src/blocks/block_header.rs
@@ -51,13 +51,20 @@ use serde::{
Serialize,
Serializer,
};
use tari_common_types::types::{BlindingFactor, BlockHash, HashDigest, BLOCK_HASH_LENGTH};
use tari_common_types::{
array::copy_into_fixed_array,
types::{BlindingFactor, BlockHash, HashDigest, BLOCK_HASH_LENGTH},
};
use tari_crypto::tari_utilities::{epoch_time::EpochTime, hex::Hex, ByteArray, Hashable};
use thiserror::Error;

#[cfg(feature = "base_node")]
use crate::blocks::{BlockBuilder, NewBlockHeaderTemplate};
use crate::proof_of_work::{PowAlgorithm, PowError, ProofOfWork};
use crate::{
common::hash_writer::HashWriter,
consensus::ConsensusEncoding,
proof_of_work::{PowAlgorithm, PowError, ProofOfWork},
};

#[derive(Debug, Error)]
pub enum BlockHeaderValidationError {
@@ -190,21 +197,49 @@ impl BlockHeader {
/// Provides a hash of the header, used for merge mining.
/// This differs from the normal header hash by not hashing the nonce and proof-of-work fields.
pub fn merged_mining_hash(&self) -> Vec<u8> {
HashDigest::new()
.chain(self.version.to_le_bytes())
.chain(self.height.to_le_bytes())
.chain(self.prev_hash.as_bytes())
.chain(self.timestamp.as_u64().to_le_bytes())
.chain(self.input_mr.as_bytes())
.chain(self.output_mr.as_bytes())
.chain(self.output_mmr_size.to_le_bytes())
.chain(self.witness_mr.as_bytes())
.chain(self.kernel_mr.as_bytes())
.chain(self.kernel_mmr_size.to_le_bytes())
.chain(self.total_kernel_offset.as_bytes())
.chain(self.total_script_offset.as_bytes())
.finalize()
.to_vec()
if self.version <= 2 {
// TODO: Remove deprecated header hashing #testnetreset
HashDigest::new()
.chain(self.version.to_le_bytes())
.chain(self.height.to_le_bytes())
.chain(self.prev_hash.as_bytes())
.chain(self.timestamp.as_u64().to_le_bytes())
.chain(self.input_mr.as_bytes())
.chain(self.output_mr.as_bytes())
.chain(self.output_mmr_size.to_le_bytes())
.chain(self.witness_mr.as_bytes())
.chain(self.kernel_mr.as_bytes())
.chain(self.kernel_mmr_size.to_le_bytes())
.chain(self.total_kernel_offset.as_bytes())
.chain(self.total_script_offset.as_bytes())
.finalize()
.to_vec()
} else {
let mut hasher = HashWriter::new(HashDigest::new());
self.version.consensus_encode(&mut hasher).unwrap();
self.height.consensus_encode(&mut hasher).unwrap();
self.prev_hash.consensus_encode(&mut hasher).unwrap();
self.timestamp.as_u64().consensus_encode(&mut hasher).unwrap();
self.input_mr.consensus_encode(&mut hasher).unwrap();
// TODO: Cleanup if/when we migrate to fixed 32-byte array type for hashes
copy_into_fixed_array::<_, 32>(&self.output_mr)
.unwrap()
.consensus_encode(&mut hasher)
.unwrap();
self.output_mmr_size.consensus_encode(&mut hasher).unwrap();
copy_into_fixed_array::<_, 32>(&self.witness_mr)
.unwrap()
.consensus_encode(&mut hasher)
.unwrap();
copy_into_fixed_array::<_, 32>(&self.kernel_mr)
.unwrap()
.consensus_encode(&mut hasher)
.unwrap();
self.kernel_mmr_size.consensus_encode(&mut hasher).unwrap();
self.total_kernel_offset.consensus_encode(&mut hasher).unwrap();
self.total_script_offset.consensus_encode(&mut hasher).unwrap();
hasher.finalize().to_vec()
}
}

#[inline]
@@ -243,12 +278,26 @@ impl From<NewBlockHeaderTemplate> for BlockHeader {

impl Hashable for BlockHeader {
fn hash(&self) -> Vec<u8> {
HashDigest::new()
.chain(self.merged_mining_hash())
.chain(self.pow.to_bytes())
.chain(self.nonce.to_le_bytes())
.finalize()
.to_vec()
if self.version <= 2 {
HashDigest::new()
.chain(self.merged_mining_hash())
.chain(self.pow.to_bytes())
.chain(self.nonce.to_le_bytes())
.finalize()
.to_vec()
} else {
let mut hasher = HashWriter::new(HashDigest::new());
// TODO: this excludes extraneous length varint used for Vec<u8> since a hash is always 32-bytes. Clean this
// up if we decide to migrate to a fixed 32-byte type
copy_into_fixed_array::<_, 32>(&self.merged_mining_hash())
.unwrap()
.consensus_encode(&mut hasher)
.unwrap();

self.pow.consensus_encode(&mut hasher).unwrap();
self.nonce.consensus_encode(&mut hasher).unwrap();
hasher.finalize().to_vec()
}
}
}

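Both hashing paths above lean on copy_into_fixed_array to keep the redundant length varint for a Vec<u8> out of the preimage, since a hash is always 32 bytes; the TODO comments flag this as temporary until the hash fields migrate to a fixed 32-byte type. The helper below is a hypothetical stand-in (the real function, including its exact signature and error type, lives in tari_common_types::array) showing the behaviour those call sites rely on.

```rust
/// Hypothetical stand-in for tari_common_types::array::copy_into_fixed_array:
/// copy a variable-length byte slice into a fixed-size array, failing if the
/// length does not match. A sketch inferred from the usage above, not the
/// actual implementation.
fn copy_into_fixed_array<const N: usize>(bytes: &[u8]) -> Result<[u8; N], String> {
    if bytes.len() != N {
        return Err(format!("expected {} bytes, got {}", N, bytes.len()));
    }
    let mut out = [0u8; N];
    out.copy_from_slice(bytes);
    Ok(out)
}

fn main() {
    // A 32-byte merkle root stored as Vec<u8>, as the BlockHeader fields are today.
    let output_mr: Vec<u8> = vec![0xab; 32];

    // Encoding the fixed array instead of the Vec avoids the extra length
    // varint in the hash preimage, which is what the TODOs in the diff refer to.
    let fixed: [u8; 32] = copy_into_fixed_array(&output_mr).expect("hash is 32 bytes");
    assert_eq!(fixed.len(), 32);

    // A wrong-length input is rejected rather than silently truncated or padded.
    assert!(copy_into_fixed_array::<32>(&[0u8; 16]).is_err());
}
```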
6 changes: 2 additions & 4 deletions base_layer/core/src/blocks/genesis_block.rs
@@ -56,8 +56,6 @@ use crate::{
},
};

const LATEST_BLOCK_VERSION: u16 = 2;

/// Returns the genesis block for the selected network.
pub fn get_genesis_block(network: Network) -> ChainBlock {
use Network::*;
@@ -135,7 +133,7 @@ fn get_igor_genesis_block_raw() -> Block {
let timestamp = genesis.timestamp() as u64;
Block {
header: BlockHeader {
version: LATEST_BLOCK_VERSION,
version: 3,
height: 0,
prev_hash: vec![0; BLOCK_HASH_LENGTH],
timestamp: timestamp.into(),
@@ -276,7 +274,7 @@ fn get_dibbler_genesis_block_raw() -> Block {
let timestamp = genesis.timestamp() as u64;
Block {
header: BlockHeader {
version: LATEST_BLOCK_VERSION,
version: 2,
height: 0,
prev_hash: vec![0; BLOCK_HASH_LENGTH],
timestamp: timestamp.into(),
74 changes: 48 additions & 26 deletions base_layer/core/src/consensus/consensus_constants.rs
@@ -414,32 +414,54 @@ impl ConsensusConstants {
target_time: 200,
});
let (input_version_range, output_version_range, kernel_version_range) = version_zero();
let constants = ConsensusConstants {
effective_from_height: 0,
coinbase_lock_height: 360,
blockchain_version: 2,
valid_blockchain_version_range: 0..=3,
future_time_limit: 540,
difficulty_block_window: 90,
// 65536 = target_block_size / bytes_per_gram = (1024*1024) / 16
// adj. + 95% = 127,795 - this effectively targets ~2Mb blocks closely matching the previous 19500
// weightings
max_block_transaction_weight: 127_795,
median_timestamp_count: 11,
emission_initial: 18_462_816_327 * uT,
emission_decay: &DIBBLER_DECAY_PARAMS,
emission_tail: 800 * T,
max_randomx_seed_height: u64::MAX,
proof_of_work: algos,
faucet_value: (10 * 4000) * T,
transaction_weight: TransactionWeight::v2(),
max_script_byte_size: 2048,
input_version_range,
output_version_range,
kernel_version_range,
};

vec![constants]
vec![
ConsensusConstants {
effective_from_height: 0,
coinbase_lock_height: 360,
blockchain_version: 2,
valid_blockchain_version_range: 0..=3,
future_time_limit: 540,
difficulty_block_window: 90,
// 65536 = target_block_size / bytes_per_gram = (1024*1024) / 16
// adj. + 95% = 127,795 - this effectively targets ~2Mb blocks closely matching the previous 19500
// weightings
max_block_transaction_weight: 127_795,
median_timestamp_count: 11,
emission_initial: 18_462_816_327 * uT,
emission_decay: &DIBBLER_DECAY_PARAMS,
emission_tail: 800 * T,
max_randomx_seed_height: u64::MAX,
proof_of_work: algos.clone(),
faucet_value: (10 * 4000) * T,
transaction_weight: TransactionWeight::v2(),
max_script_byte_size: 2048,
input_version_range: input_version_range.clone(),
output_version_range: output_version_range.clone(),
kernel_version_range: kernel_version_range.clone(),
},
ConsensusConstants {
effective_from_height: 23000,
coinbase_lock_height: 360,
// CHANGE: Use v3 blocks from effective height
blockchain_version: 3,
valid_blockchain_version_range: 0..=3,
future_time_limit: 540,
difficulty_block_window: 90,
max_block_transaction_weight: 127_795,
median_timestamp_count: 11,
emission_initial: 18_462_816_327 * uT,
emission_decay: &DIBBLER_DECAY_PARAMS,
emission_tail: 800 * T,
max_randomx_seed_height: u64::MAX,
proof_of_work: algos,
faucet_value: (10 * 4000) * T,
transaction_weight: TransactionWeight::v2(),
max_script_byte_size: 2048,
input_version_range,
output_version_range,
kernel_version_range,
},
]
}

pub fn mainnet() -> Vec<Self> {
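The new dibbler entry turns the hashing change into a scheduled hard fork: blockchain_version stays at 2 from genesis and switches to 3 at height 23000, at which point the consensus-encoded header hash applies. The sketch below shows how a per-height lookup over such a list plausibly works; it is an illustration only, not the crate's actual ConsensusManager selection logic.

```rust
/// A trimmed-down stand-in for ConsensusConstants, keeping only the fields
/// relevant to the hard fork added in this diff.
#[derive(Debug)]
struct Constants {
    effective_from_height: u64,
    blockchain_version: u16,
}

/// Pick the constants in effect at `height`: the last entry whose
/// effective_from_height has been reached. This mirrors how a list of
/// ConsensusConstants is presumably consulted; the real selection logic
/// lives elsewhere in the crate and may differ in detail.
fn constants_at(height: u64, list: &[Constants]) -> &Constants {
    list.iter()
        .rev()
        .find(|c| c.effective_from_height <= height)
        .expect("list must contain an entry effective from height 0")
}

fn main() {
    // Matches the dibbler entries added above: v2 from genesis, v3 from 23000.
    let dibbler = vec![
        Constants { effective_from_height: 0, blockchain_version: 2 },
        Constants { effective_from_height: 23000, blockchain_version: 3 },
    ];

    assert_eq!(constants_at(0, &dibbler).blockchain_version, 2);
    assert_eq!(constants_at(22_999, &dibbler).blockchain_version, 2);
    assert_eq!(constants_at(23_000, &dibbler).blockchain_version, 3);
}
```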
25 changes: 23 additions & 2 deletions base_layer/core/src/proof_of_work/proof_of_work.rs
@@ -20,13 +20,19 @@
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

use std::fmt::{Display, Error, Formatter};
use std::{
fmt::{Display, Error, Formatter},
io::{self, Write},
};

use bytes::BufMut;
use serde::{Deserialize, Serialize};
use tari_crypto::tari_utilities::hex::Hex;

use crate::proof_of_work::PowAlgorithm;
use crate::{
consensus::{ConsensusEncoding, ConsensusEncodingSized},
proof_of_work::PowAlgorithm,
};

pub trait AchievedDifficulty {}

@@ -89,6 +95,21 @@ impl Display for ProofOfWork {
}
}

impl ConsensusEncoding for ProofOfWork {
fn consensus_encode<W: Write>(&self, writer: &mut W) -> Result<usize, io::Error> {
writer.write_all(&[self.pow_algo as u8])?;
let mut written = 1;
written += self.pow_data.consensus_encode(writer)?;
Ok(written)
}
}

impl ConsensusEncodingSized for ProofOfWork {
fn consensus_encode_exact_size(&self) -> usize {
1 + self.pow_data.consensus_encode_exact_size()
}
}

#[cfg(test)]
mod test {
use crate::proof_of_work::proof_of_work::{PowAlgorithm, ProofOfWork};
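The ConsensusEncoding impl above writes the PoW algorithm as a single byte followed by the consensus-encoded pow_data, and ConsensusEncodingSized must report exactly the number of bytes that consensus_encode writes, since header hashing depends on a deterministic byte stream. The self-contained sketch below mirrors that shape with simplified stand-in traits; the single-byte length prefix for pow_data and the enum discriminants are assumptions made only to keep the example runnable.

```rust
use std::io::{self, Write};

/// Simplified stand-ins for the real traits in crate::consensus.
trait ConsensusEncoding {
    fn consensus_encode<W: Write>(&self, writer: &mut W) -> Result<usize, io::Error>;
}

trait ConsensusEncodingSized: ConsensusEncoding {
    fn consensus_encode_exact_size(&self) -> usize;
}

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum PowAlgorithm {
    // Illustrative discriminants; not necessarily the real values.
    Monero = 0,
    Sha3 = 1,
}

struct ProofOfWork {
    pow_algo: PowAlgorithm,
    pow_data: Vec<u8>,
}

impl ConsensusEncoding for ProofOfWork {
    fn consensus_encode<W: Write>(&self, writer: &mut W) -> Result<usize, io::Error> {
        // One byte for the algorithm discriminant, as in the diff above.
        writer.write_all(&[self.pow_algo as u8])?;
        let mut written = 1;
        // Assumed encoding for Vec<u8>: a single-byte length followed by the
        // data (enough for this sketch; the real encoding differs).
        writer.write_all(&[self.pow_data.len() as u8])?;
        writer.write_all(&self.pow_data)?;
        written += 1 + self.pow_data.len();
        Ok(written)
    }
}

impl ConsensusEncodingSized for ProofOfWork {
    fn consensus_encode_exact_size(&self) -> usize {
        1 + 1 + self.pow_data.len()
    }
}

fn main() -> io::Result<()> {
    let pow = ProofOfWork {
        pow_algo: PowAlgorithm::Sha3,
        pow_data: vec![1, 2, 3],
    };
    let mut buf = Vec::new();
    let written = pow.consensus_encode(&mut buf)?;
    // The sized trait must agree with what was actually written.
    assert_eq!(written, buf.len());
    assert_eq!(written, pow.consensus_encode_exact_size());
    Ok(())
}
```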
