feat!: add domain consensus hashing to all previous uses of default consensus hashing (#4522)

Description
---
- Added domain-separated hashing to all previous uses of default consensus hashing (see the sketch below).
- Changed the `output_mr`, `witness_mr`, `kernel_mr` and `input_mr` fields in `pub struct BlockHeader` to `FixedHash` instead of the variable-length `BlockHash`.

Motivation and Context
---
As per issue #4396.

How Has This Been Tested?
---
- Passed all unit tests
- Passed all cucumber tests
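
A minimal sketch of the domain-separation idea, using the `blake2` crate as a stand-in for the project's hasher; `demo_hash` and the domain strings are illustrative, not the `DomainSeparatedConsensusHasher`/`BlocksHashDomain` API this commit actually uses (see the `block_header.rs` hunks below):

```rust
use blake2::{Blake2b512, Digest};

// Minimal domain separation: the domain and label are length-prefixed and
// absorbed before the payload, so hashes from different domains can never
// collide or be replayed across contexts.
fn demo_hash(domain: &str, label: &str, payload: &[u8]) -> Vec<u8> {
    let mut hasher = Blake2b512::new();
    for part in [domain.as_bytes(), label.as_bytes(), payload] {
        hasher.update((part.len() as u64).to_le_bytes());
        hasher.update(part);
    }
    hasher.finalize().to_vec()
}

fn main() {
    let a = demo_hash("com.example.blocks", "block_header", b"same bytes");
    let b = demo_hash("com.example.transactions", "block_header", b"same bytes");
    assert_ne!(a, b); // same payload, different domains => different hashes
}
```

In the diff itself the equivalent call is `DomainSeparatedConsensusHasher::<BlocksHashDomain>::new("block_header")`, which plays the role of `demo_hash`'s domain and label arguments.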
hansieodendaal committed Aug 23, 2022
1 parent 69e8d0b commit 1885509
Showing 25 changed files with 348 additions and 253 deletions.
18 changes: 9 additions & 9 deletions applications/tari_app_grpc/src/conversions/block_header.rs
@@ -22,7 +22,7 @@

use std::convert::TryFrom;

-use tari_common_types::types::BlindingFactor;
+use tari_common_types::types::{BlindingFactor, FixedHash};
use tari_core::{blocks::BlockHeader, proof_of_work::ProofOfWork};
use tari_utilities::{ByteArray, Hashable};

@@ -40,12 +40,12 @@ impl From<BlockHeader> for grpc::BlockHeader {
height: h.height,
prev_hash: h.prev_hash,
timestamp: datetime_to_timestamp(h.timestamp),
-input_mr: h.input_mr,
-output_mr: h.output_mr,
+input_mr: h.input_mr.to_vec(),
+output_mr: h.output_mr.to_vec(),
output_mmr_size: h.output_mmr_size,
-kernel_mr: h.kernel_mr,
+kernel_mr: h.kernel_mr.to_vec(),
kernel_mmr_size: h.kernel_mmr_size,
-witness_mr: h.witness_mr,
+witness_mr: h.witness_mr.to_vec(),
total_kernel_offset: h.total_kernel_offset.to_vec(),
total_script_offset: h.total_script_offset.to_vec(),
nonce: h.nonce,
@@ -81,11 +81,11 @@ impl TryFrom<grpc::BlockHeader> for BlockHeader {
height: header.height,
prev_hash: header.prev_hash,
timestamp,
-input_mr: header.input_mr,
-output_mr: header.output_mr,
-witness_mr: header.witness_mr,
+input_mr: FixedHash::try_from(header.input_mr).map_err(|err| err.to_string())?,
+output_mr: FixedHash::try_from(header.output_mr).map_err(|err| err.to_string())?,
+witness_mr: FixedHash::try_from(header.witness_mr).map_err(|err| err.to_string())?,
output_mmr_size: header.output_mmr_size,
-kernel_mr: header.kernel_mr,
+kernel_mr: FixedHash::try_from(header.kernel_mr).expect("Array size 32 cannot fail"),
kernel_mmr_size: header.kernel_mmr_size,
total_kernel_offset,
total_script_offset,
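
The hunk above shows the recurring boundary pattern for this change: protobuf fields remain `Vec<u8>`, so struct-to-wire conversions gain `.to_vec()` while wire-to-struct conversions become fallible. A self-contained sketch of that shape, with a hypothetical `DemoFixedHash` newtype standing in for `tari_common_types::types::FixedHash`:

```rust
use std::convert::{TryFrom, TryInto};

// `DemoFixedHash` is a hypothetical stand-in for a fixed 32-byte hash type.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct DemoFixedHash([u8; 32]);

impl TryFrom<Vec<u8>> for DemoFixedHash {
    type Error = String;

    // Wire -> struct: fallible, because the wire bytes may be the wrong length.
    fn try_from(v: Vec<u8>) -> Result<Self, Self::Error> {
        let bytes: [u8; 32] = v
            .try_into()
            .map_err(|v: Vec<u8>| format!("expected 32 bytes, got {}", v.len()))?;
        Ok(DemoFixedHash(bytes))
    }
}

impl DemoFixedHash {
    // Struct -> wire: infallible, mirroring the `.to_vec()` calls in the hunk above.
    fn to_vec(&self) -> Vec<u8> {
        self.0.to_vec()
    }
}

fn main() -> Result<(), String> {
    let wire: Vec<u8> = vec![0u8; 32];
    let hash = DemoFixedHash::try_from(wire)?;
    assert_eq!(hash.to_vec().len(), 32);
    assert!(DemoFixedHash::try_from(vec![0u8; 31]).is_err());
    Ok(())
}
```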
6 changes: 3 additions & 3 deletions applications/tari_base_node/src/grpc/base_node_grpc_server.rs
@@ -542,7 +542,7 @@ impl tari_rpc::base_node_server::BaseNode for BaseNodeGrpcServer {
};
// construct response
let block_hash = new_block.hash();
-let mining_hash = new_block.header.merged_mining_hash();
+let mining_hash = new_block.header.merged_mining_hash().to_vec();
let block: Option<tari_rpc::Block> = Some(
new_block
.try_into()
@@ -587,7 +587,7 @@ impl tari_rpc::base_node_server::BaseNode for BaseNodeGrpcServer {
};
// construct response
let block_hash = new_block.hash();
-let mining_hash = new_block.header.merged_mining_hash();
+let mining_hash = new_block.header.merged_mining_hash().to_vec();

let (header, block_body) = new_block.into_header_body();
let mut header_bytes = Vec::new();
@@ -599,7 +599,7 @@ impl tari_rpc::base_node_server::BaseNode for BaseNodeGrpcServer {
header: header_bytes,
block_body: block_body_bytes,
merge_mining_hash: mining_hash,
-utxo_mr: header.output_mr,
+utxo_mr: header.output_mr.to_vec(),
};
debug!(target: LOG_TARGET, "Sending GetNewBlockBlob response to client");
Ok(Response::new(response))
2 changes: 0 additions & 2 deletions applications/tari_console_wallet/src/wallet_modes.rs
@@ -166,10 +166,8 @@ pub(crate) fn parse_command_file(script: String) -> Result<Vec<CliCommands>, Exi
// skip empty lines and 'comments' starting with #
if !command.trim().is_empty() && !command.trim().starts_with('#') {
let command_trimmed = cli_parse_prefix.to_owned() + " " + command.trim();
-println!("\ncommand: {}", command_trimmed);
let parse_vec: Vec<&str> = command_trimmed.split(' ').collect();
let cli_parsed = Cli::try_parse_from(&parse_vec);
-println!("cli_parsed: {:?}\n", cli_parsed);
match cli_parsed {
Ok(result) => {
if let Some(sub_command) = result.command2 {
10 changes: 1 addition & 9 deletions base_layer/common_types/src/types/fixed_hash.rs
@@ -26,12 +26,10 @@ use std::{
ops::{Deref, DerefMut},
};

-use digest::{consts::U32, generic_array, Digest};
+use digest::{consts::U32, generic_array};
use serde::{Deserialize, Serialize};
use tari_utilities::hex::{Hex, HexError};

-use crate::types::Blake256;

const ZERO_HASH: [u8; FixedHash::byte_size()] = [0u8; FixedHash::byte_size()];

#[derive(thiserror::Error, Debug)]
@@ -53,12 +53,6 @@ impl FixedHash {
pub fn as_slice(&self) -> &[u8] {
&self.0
}

-/// Hashes the bytes and returns the resulting `FixedHash`. Generally only be used as a convenience function for
-/// tests.
-pub fn hash_bytes<T: AsRef<[u8]>>(bytes: T) -> Self {
-Blake256::default().chain(bytes).finalize().into()
-}
}

impl From<[u8; FixedHash::byte_size()]> for FixedHash {
@@ -335,7 +335,7 @@ impl<'a, B: BlockchainBackend + 'static> HorizonStateSynchronization<'a, B> {
}

let mmr_root = kernel_mmr.get_merkle_root()?;
-if mmr_root != current_header.header().kernel_mr {
+if mmr_root.as_slice() != current_header.header().kernel_mr.as_slice() {
return Err(HorizonSyncError::InvalidMmrRoot {
mmr_tree: MmrTree::Kernel,
at_height: current_header.height(),
@@ -601,7 +601,7 @@ impl<'a, B: BlockchainBackend + 'static> HorizonStateSynchronization<'a, B> {
let output_mmr = MutablePrunedOutputMmr::new(pruned_output_set.clone(), bitmap.clone())?;

let mmr_root = output_mmr.get_merkle_root()?;
-if mmr_root != current_header.header().output_mr {
+if mmr_root.as_slice() != current_header.header().output_mr.as_slice() {
return Err(HorizonSyncError::InvalidMmrRoot {
mmr_tree: MmrTree::Utxo,
at_height: current_header.height(),
@@ -611,7 +611,7 @@ impl<'a, B: BlockchainBackend + 'static> HorizonStateSynchronization<'a, B> {
}

let mmr_root = witness_mmr.get_merkle_root()?;
-if mmr_root != current_header.header().witness_mr {
+if mmr_root.as_slice() != current_header.header().witness_mr.as_slice() {
return Err(HorizonSyncError::InvalidMmrRoot {
mmr_tree: MmrTree::Witness,
at_height: current_header.height(),
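
These three hunks make the same adjustment: the MMR backend still returns a `Vec<u8>` root while the header fields are now fixed 32-byte values, so with no `PartialEq` assumed between the two types the comparison is done on byte slices. A stand-alone illustration (`roots_match` is a hypothetical helper):

```rust
// `roots_match` compares a computed Vec<u8> MMR root against a fixed 32-byte
// header field by viewing both sides as &[u8].
fn roots_match(computed_root: &[u8], header_root: &[u8; 32]) -> bool {
    computed_root == header_root.as_slice()
}

fn main() {
    let computed: Vec<u8> = vec![7u8; 32];
    let in_header = [7u8; 32];
    assert!(roots_match(computed.as_slice(), &in_header));
    assert!(!roots_match(&computed[..31], &in_header)); // length mismatch fails
}
```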
146 changes: 94 additions & 52 deletions base_layer/core/src/blocks/block_header.rs
@@ -55,16 +55,17 @@ use serde::{
Serializer,
};
use tari_common_types::{
-array::{copy_into_fixed_array, copy_into_fixed_array_lossy},
-types::{BlindingFactor, BlockHash, BLOCK_HASH_LENGTH},
+array::copy_into_fixed_array_lossy,
+types::{BlindingFactor, BlockHash, FixedHash, BLOCK_HASH_LENGTH},
};
use tari_utilities::{epoch_time::EpochTime, hex::Hex, ByteArray, Hashable};
use thiserror::Error;

#[cfg(feature = "base_node")]
use crate::blocks::{BlockBuilder, NewBlockHeaderTemplate};
use crate::{
-consensus::{ConsensusDecoding, ConsensusEncoding, ConsensusEncodingSized, ConsensusHasher},
+blocks::BlocksHashDomain,
+consensus::{ConsensusDecoding, ConsensusEncoding, ConsensusEncodingSized, DomainSeparatedConsensusHasher},
proof_of_work::{PowAlgorithm, PowError, ProofOfWork},
};

@@ -96,26 +96,27 @@ pub struct BlockHeader {
pub height: u64,
/// Hash of the block previous to this in the chain.
#[serde(with = "hash_serializer")]
+// TODO: Change type to 'FixedHash'
pub prev_hash: BlockHash,
/// Timestamp at which the block was built.
pub timestamp: EpochTime,
/// This is the UTXO merkle root of the outputs
/// This is calculated as Hash (txo MMR root || roaring bitmap hash of UTXO indices)
-#[serde(with = "hash_serializer")]
-pub output_mr: BlockHash,
+#[serde(with = "fixed_hash_serializer")]
+pub output_mr: FixedHash,
/// This is the MMR root of the witness proofs
-#[serde(with = "hash_serializer")]
-pub witness_mr: BlockHash,
+#[serde(with = "fixed_hash_serializer")]
+pub witness_mr: FixedHash,
/// The size (number of leaves) of the output and range proof MMRs at the time of this header
pub output_mmr_size: u64,
/// This is the MMR root of the kernels
-#[serde(with = "hash_serializer")]
-pub kernel_mr: BlockHash,
+#[serde(with = "fixed_hash_serializer")]
+pub kernel_mr: FixedHash,
/// The number of MMR leaves in the kernel MMR
pub kernel_mmr_size: u64,
/// This is the Merkle root of the inputs in this block
-#[serde(with = "hash_serializer")]
-pub input_mr: BlockHash,
+#[serde(with = "fixed_hash_serializer")]
+pub input_mr: FixedHash,
/// Sum of kernel offsets for all kernels in this block.
pub total_kernel_offset: BlindingFactor,
/// Sum of script offsets for all kernels in this block.
@@ -134,12 +136,12 @@ impl BlockHeader {
height: 0,
prev_hash: vec![0; BLOCK_HASH_LENGTH],
timestamp: EpochTime::now(),
-output_mr: vec![0; BLOCK_HASH_LENGTH],
-witness_mr: vec![0; BLOCK_HASH_LENGTH],
+output_mr: FixedHash::zero(),
+witness_mr: FixedHash::zero(),
output_mmr_size: 0,
-kernel_mr: vec![0; BLOCK_HASH_LENGTH],
+kernel_mr: FixedHash::zero(),
kernel_mmr_size: 0,
-input_mr: vec![0; BLOCK_HASH_LENGTH],
+input_mr: FixedHash::zero(),
total_kernel_offset: BlindingFactor::default(),
total_script_offset: BlindingFactor::default(),
nonce: 0,
@@ -157,12 +159,12 @@
height: prev.height + 1,
prev_hash,
timestamp: EpochTime::now(),
-output_mr: vec![0; BLOCK_HASH_LENGTH],
-witness_mr: vec![0; BLOCK_HASH_LENGTH],
+output_mr: FixedHash::zero(),
+witness_mr: FixedHash::zero(),
output_mmr_size: prev.output_mmr_size,
-kernel_mr: vec![0; BLOCK_HASH_LENGTH],
+kernel_mr: FixedHash::zero(),
kernel_mmr_size: prev.kernel_mmr_size,
-input_mr: vec![0; BLOCK_HASH_LENGTH],
+input_mr: FixedHash::zero(),
total_kernel_offset: BlindingFactor::default(),
total_script_offset: BlindingFactor::default(),
nonce: 0,
@@ -215,22 +217,22 @@

/// Provides a hash of the header, used for the merge mining.
/// This differs from the normal hash by not hashing the nonce and kernel pow.
-pub fn merged_mining_hash(&self) -> Vec<u8> {
-ConsensusHasher::default()
-.chain(&self.version)
-.chain(&self.height)
-.chain(&self.prev_hash)
-.chain(&self.timestamp)
-.chain(&self.input_mr)
-// TODO: Cleanup if/when we migrate to fixed 32-byte array type for hashes
-.chain(&copy_into_fixed_array_lossy::<_, 32>(&self.output_mr))
-.chain(&self.output_mmr_size)
-.chain(&copy_into_fixed_array_lossy::<_, 32>(&self.witness_mr))
-.chain(&copy_into_fixed_array_lossy::<_, 32>(&self.kernel_mr))
-.chain(&self.kernel_mmr_size)
-.chain(&self.total_kernel_offset)
-.chain(&self.total_script_offset)
-.finalize().to_vec()
+pub fn merged_mining_hash(&self) -> FixedHash {
+DomainSeparatedConsensusHasher::<BlocksHashDomain>::new("block_header")
+.chain(&self.version)
+.chain(&self.height)
+.chain(&self.prev_hash)
+.chain(&self.timestamp)
+.chain(&self.input_mr)
+.chain(&self.output_mr)
+.chain(&self.output_mmr_size)
+.chain(&self.witness_mr)
+.chain(&self.kernel_mr)
+.chain(&self.kernel_mmr_size)
+.chain(&self.total_kernel_offset)
+.chain(&self.total_script_offset)
+.finalize()
+.into()
}

#[inline]
@@ -257,13 +259,13 @@ impl From<NewBlockHeaderTemplate> for BlockHeader {
height: header_template.height,
prev_hash: header_template.prev_hash,
timestamp: EpochTime::now(),
-output_mr: vec![],
-witness_mr: vec![],
+output_mr: FixedHash::zero(),
+witness_mr: FixedHash::zero(),
// TODO: put mmr sizes in template
output_mmr_size: 0,
-kernel_mr: vec![],
+kernel_mr: FixedHash::zero(),
kernel_mmr_size: 0,
-input_mr: vec![],
+input_mr: FixedHash::zero(),
total_kernel_offset: header_template.total_kernel_offset,
total_script_offset: header_template.total_script_offset,
nonce: 0,
@@ -274,13 +276,12 @@

impl Hashable for BlockHeader {
fn hash(&self) -> Vec<u8> {
-ConsensusHasher::default()
-// TODO: this excludes extraneous length varint used for Vec<u8> since a hash is always 32-bytes. Clean this
-// up if we decide to migrate to a fixed 32-byte type
-.chain(&copy_into_fixed_array::<_, 32>(&self.merged_mining_hash()).unwrap())
+DomainSeparatedConsensusHasher::<BlocksHashDomain>::new("block_header")
+.chain(&self.merged_mining_hash().as_slice())
.chain(&self.pow)
.chain(&self.nonce)
-.finalize().to_vec()
+.finalize()
+.to_vec()
}
}

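
As the doc comment above states, `merged_mining_hash()` excludes the nonce and proof of work, and `hash()` then commits to the mining hash plus `pow` and `nonce`. The sketch below shows that two-stage shape with a plain Blake2b from the `blake2` crate standing in for the domain-separated consensus hasher; the helpers and byte encodings are illustrative, not the consensus encoding:

```rust
use blake2::{Blake2b512, Digest};

// Stage 1: hash every header field except the nonce and PoW data; merge miners
// treat this digest as the fixed body they mine against.
fn demo_mining_hash(fields: &[&[u8]]) -> Vec<u8> {
    let mut h = Blake2b512::new();
    h.update(b"block_header"); // stand-in for the domain/label separation
    for f in fields {
        h.update(f);
    }
    h.finalize().to_vec()
}

// Stage 2: the block hash commits to the mining hash plus the PoW data and the
// nonce, so grinding the nonce never re-hashes all the header fields.
fn demo_block_hash(mining_hash: &[u8], pow: &[u8], nonce: u64) -> Vec<u8> {
    let mut h = Blake2b512::new();
    h.update(b"block_header");
    h.update(mining_hash);
    h.update(pow);
    h.update(nonce.to_le_bytes());
    h.finalize().to_vec()
}

fn main() {
    let fields: [&[u8]; 4] = [b"version", b"height", b"prev_hash", b"output_mr"];
    let mining = demo_mining_hash(&fields);
    let h1 = demo_block_hash(&mining, b"pow", 1);
    let h2 = demo_block_hash(&mining, b"pow", 2);
    assert_ne!(h1, h2); // only the nonce changed; the mining hash was reused
}
```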
@@ -364,18 +365,59 @@ pub(crate) mod hash_serializer {
}
}

+pub(crate) mod fixed_hash_serializer {
+use tari_utilities::hex::Hex;
+
+use super::*;
+
+#[allow(clippy::ptr_arg)]
+pub fn serialize<S>(bytes: &FixedHash, serializer: S) -> Result<S::Ok, S::Error>
+where S: Serializer {
+if serializer.is_human_readable() {
+bytes.to_hex().serialize(serializer)
+} else {
+serializer.serialize_bytes(bytes.as_bytes())
+}
+}
+
+pub fn deserialize<'de, D>(deserializer: D) -> Result<FixedHash, D::Error>
+where D: Deserializer<'de> {
+struct BlockHashVisitor;
+
+impl<'de> Visitor<'de> for BlockHashVisitor {
+type Value = FixedHash;
+
+fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+formatter.write_str("A block header hash in binary format")
+}
+
+fn visit_bytes<E>(self, v: &[u8]) -> Result<FixedHash, E>
+where E: de::Error {
+FixedHash::try_from(v).map_err(E::custom)
+}
+}
+
+if deserializer.is_human_readable() {
+let s = String::deserialize(deserializer)?;
+FixedHash::from_hex(&s).map_err(de::Error::custom)
+} else {
+deserializer.deserialize_bytes(BlockHashVisitor)
+}
+}
+}

impl ConsensusEncoding for BlockHeader {
fn consensus_encode<W: Write>(&self, writer: &mut W) -> Result<(), io::Error> {
self.version.consensus_encode(writer)?;
self.height.consensus_encode(writer)?;
copy_into_fixed_array_lossy::<_, 32>(&self.prev_hash).consensus_encode(writer)?;
self.timestamp.consensus_encode(writer)?;
-copy_into_fixed_array_lossy::<_, 32>(&self.output_mr).consensus_encode(writer)?;
-copy_into_fixed_array_lossy::<_, 32>(&self.witness_mr).consensus_encode(writer)?;
+self.output_mr.as_slice().consensus_encode(writer)?;
+self.witness_mr.as_slice().consensus_encode(writer)?;
self.output_mmr_size.consensus_encode(writer)?;
-copy_into_fixed_array_lossy::<_, 32>(&self.kernel_mr).consensus_encode(writer)?;
+self.kernel_mr.as_slice().consensus_encode(writer)?;
self.kernel_mmr_size.consensus_encode(writer)?;
-copy_into_fixed_array_lossy::<_, 32>(&self.input_mr).consensus_encode(writer)?;
+self.input_mr.as_slice().consensus_encode(writer)?;
self.total_kernel_offset.consensus_encode(writer)?;
self.total_script_offset.consensus_encode(writer)?;
self.nonce.consensus_encode(writer)?;
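
The `fixed_hash_serializer` module added above writes hex text for human-readable formats and raw bytes otherwise. A stand-alone sketch of that behaviour, assuming the `serde` (with derive), `serde_json` and `hex` crates, with a bare `[u8; 32]` in place of `FixedHash`:

```rust
use serde::{Deserialize, Serialize};

// Illustrative serde "with" module: hex string when the format is
// human-readable, raw bytes otherwise; decoding enforces exactly 32 bytes.
mod demo_hash_serde {
    use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer};

    pub fn serialize<S: Serializer>(hash: &[u8; 32], s: S) -> Result<S::Ok, S::Error> {
        if s.is_human_readable() {
            hex::encode(hash).serialize(s)
        } else {
            s.serialize_bytes(hash)
        }
    }

    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<[u8; 32], D::Error> {
        let bytes = if d.is_human_readable() {
            hex::decode(String::deserialize(d)?).map_err(D::Error::custom)?
        } else {
            Vec::<u8>::deserialize(d)?
        };
        bytes
            .try_into()
            .map_err(|_| D::Error::custom("expected exactly 32 bytes"))
    }
}

#[derive(Serialize, Deserialize)]
struct DemoHeader {
    #[serde(with = "demo_hash_serde")]
    output_mr: [u8; 32],
}

fn main() {
    let header = DemoHeader { output_mr: [1u8; 32] };
    // JSON reports itself as human-readable, so the hash round-trips as hex text.
    let json = serde_json::to_string(&header).unwrap();
    assert!(json.contains(&hex::encode([1u8; 32])));
}
```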
@@ -393,12 +435,12 @@
header.height = u64::consensus_decode(reader)?;
header.prev_hash = <[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?.to_vec();
header.timestamp = EpochTime::consensus_decode(reader)?;
-header.output_mr = <[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?.to_vec();
-header.witness_mr = <[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?.to_vec();
+header.output_mr = FixedHash::from(<[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?);
+header.witness_mr = FixedHash::from(<[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?);
header.output_mmr_size = u64::consensus_decode(reader)?;
-header.kernel_mr = <[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?.to_vec();
+header.kernel_mr = FixedHash::from(<[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?);
header.kernel_mmr_size = u64::consensus_decode(reader)?;
-header.input_mr = <[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?.to_vec();
+header.input_mr = FixedHash::from(<[u8; 32] as ConsensusDecoding>::consensus_decode(reader)?);
header.total_kernel_offset = BlindingFactor::consensus_decode(reader)?;
header.total_script_offset = BlindingFactor::consensus_decode(reader)?;
header.nonce = u64::consensus_decode(reader)?;
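
Decoding into a fixed 32-byte array, as this hunk does, needs no length prefix: exactly 32 bytes are read or the decode fails. A sketch with hypothetical helpers over `std::io` (not the `ConsensusDecoding` trait):

```rust
use std::io::{self, Read, Write};

// `write_fixed_hash`/`read_fixed_hash` are illustrative helpers: a fixed
// 32-byte hash is written without a length prefix, and a short read fails
// instead of silently truncating.
fn write_fixed_hash<W: Write>(hash: &[u8; 32], writer: &mut W) -> io::Result<()> {
    writer.write_all(hash)
}

fn read_fixed_hash<R: Read>(reader: &mut R) -> io::Result<[u8; 32]> {
    let mut buf = [0u8; 32];
    reader.read_exact(&mut buf)?; // UnexpectedEof on fewer than 32 bytes
    Ok(buf)
}

fn main() -> io::Result<()> {
    let mut wire = Vec::new();
    write_fixed_hash(&[9u8; 32], &mut wire)?;
    let decoded = read_fixed_hash(&mut wire.as_slice())?;
    assert_eq!(decoded, [9u8; 32]);
    Ok(())
}
```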
