
fix: allow 0-conf in blockchain db #3680

Merged
2 changes: 1 addition & 1 deletion base_layer/core/src/chain_storage/blockchain_database.rs
@@ -860,7 +860,7 @@ where B: BlockchainBackend

if block_add_result.was_chain_modified() {
// If blocks were added and the node is in pruned mode, perform pruning
- prune_database_if_needed(&mut *db, self.config.pruning_horizon, self.config.pruning_interval)?
+ prune_database_if_needed(&mut *db, self.config.pruning_horizon, self.config.pruning_interval)?;
}

info!(
75 changes: 54 additions & 21 deletions base_layer/core/src/chain_storage/lmdb_db/lmdb_db.rs
@@ -87,6 +87,7 @@ use crate::{
lmdb_replace,
},
TransactionInputRowData,
+ TransactionInputRowDataRef,
TransactionKernelRowData,
TransactionOutputRowData,
},
@@ -637,21 +638,26 @@ impl LMDBDatabase {
&self,
txn: &WriteTransaction<'_>,
height: u64,
- header_hash: HashOutput,
- input: TransactionInput,
+ header_hash: &HashOutput,
+ input: &TransactionInput,
mmr_position: u32,
) -> Result<(), ChainStorageError> {
lmdb_delete(
txn,
&self.utxo_commitment_index,
input.commitment().as_bytes(),
"utxo_commitment_index",
- )?;
+ )
+ .or_else(|err| match err {
+     // The commitment may not yet be included in the DB in the 0-conf transaction case
+     ChainStorageError::ValueNotFound { .. } => Ok(()),
+     _ => Err(err),
+ })?;
lmdb_insert(
txn,
&self.deleted_txo_mmr_position_to_height_index,
&mmr_position,
- &(height, &header_hash),
+ &(height, header_hash),
"deleted_txo_mmr_position_to_height_index",
)?;
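
A note on the or_else above: with 0-conf support an input may spend an output created in the same block, and such an output never reaches utxo_commitment_index, so the delete must tolerate a missing key while still propagating real failures. A minimal standalone sketch of the pattern (StoreError is an illustrative stand-in for the real ChainStorageError):

#[derive(Debug)]
enum StoreError {
    ValueNotFound,
    Other(String),
}

/// Treat a missing key as success; propagate every other error.
fn delete_tolerating_missing(res: Result<(), StoreError>) -> Result<(), StoreError> {
    res.or_else(|err| match err {
        // 0-conf case: the spent output was created in this same block and was
        // never written to the unspent-commitment index, so there is nothing to delete
        StoreError::ValueNotFound => Ok(()),
        other => Err(other),
    })
}

fn main() {
    assert!(delete_tolerating_missing(Err(StoreError::ValueNotFound)).is_ok());
    assert!(delete_tolerating_missing(Err(StoreError::Other("disk error".into()))).is_err());
    assert!(delete_tolerating_missing(Ok(())).is_ok());
}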

@@ -680,6 +686,7 @@ impl LMDBDatabase {
});
}

+ // TODO: 0-conf is not currently supported for transactions with unique_id set
lmdb_delete(txn, &self.unique_id_index, key.as_bytes(), "unique_id_index")?;
key.set_deleted_height(height);
debug!(
@@ -702,16 +709,16 @@
}

let hash = input.hash();
- let key = InputKey::new(&header_hash, mmr_position, &hash);
+ let key = InputKey::new(header_hash, mmr_position, &hash);
lmdb_insert(
txn,
&*self.inputs_db,
key.as_bytes(),
- &TransactionInputRowData {
+ &TransactionInputRowDataRef {
input,
header_hash,
mmr_position,
- hash,
+ hash: &hash,
},
"inputs_db",
)
@@ -1169,34 +1176,60 @@ impl LMDBDatabase {
let mut output_mmr = MutableMmr::<HashDigest, _>::new(pruned_output_set, Bitmap::create())?;
let mut witness_mmr = MerkleMountainRange::<HashDigest, _>::new(pruned_proof_set);

+ let leaf_count = witness_mmr.get_leaf_count()?;
+
+ // Output hashes added before inputs so that inputs can spend outputs in this transaction (0-conf and combined)
+ let outputs = outputs
+     .into_iter()
+     .enumerate()
+     .map(|(i, output)| {
+         output_mmr.push(output.hash())?;
+         witness_mmr.push(output.witness_hash())?;
+         Ok((output, leaf_count + i + 1))
+     })
+     .collect::<Result<Vec<_>, ChainStorageError>>()?;
+
+ let mut spent_zero_conf_commitments = Vec::new();
+ // unique_id_index expects inputs to be inserted before outputs
- for input in inputs {
-     let index = self
-         .fetch_mmr_leaf_index(&**txn, MmrTree::Utxo, &input.output_hash())?
-         .ok_or(ChainStorageError::UnspendableInput)?;
+ for input in &inputs {
+     let output_hash = input.output_hash();
+     let index = match self.fetch_mmr_leaf_index(&**txn, MmrTree::Utxo, &output_hash)? {
+         Some(index) => index,
+         None => match output_mmr.find_leaf_index(&output_hash)? {
+             Some(index) => {
+                 debug!(
+                     target: LOG_TARGET,
+                     "Input {} spends output from current block (0-conf)", input
+                 );
+                 spent_zero_conf_commitments.push(&input.commitment);
+                 index
+             },
+             None => return Err(ChainStorageError::UnspendableInput),
+         },
+     };
if !output_mmr.delete(index) {
return Err(ChainStorageError::InvalidOperation(format!(
"Could not delete index {} from the output MMR",
index
)));
}
debug!(target: LOG_TARGET, "Inserting input `{}`", input.commitment.to_hex());
- self.insert_input(txn, current_header_at_height.height, block_hash.clone(), input, index)?;
+ self.insert_input(txn, current_header_at_height.height, &block_hash, input, index)?;
}

- for output in outputs {
-     output_mmr.push(output.hash())?;
-     witness_mmr.push(output.witness_hash())?;
+ for (output, mmr_count) in outputs {
debug!(target: LOG_TARGET, "Inserting output `{}`", output.commitment.to_hex());
- self.insert_output(
-     txn,
-     &block_hash,
-     header.height,
-     &output,
-     (witness_mmr.get_leaf_count()? - 1) as u32,
- )?;
+ self.insert_output(txn, &block_hash, header.height, &output, mmr_count as u32 - 1)?;
}

+ for commitment in spent_zero_conf_commitments {
+     lmdb_delete(
+         txn,
+         &self.utxo_commitment_index,
+         commitment.as_bytes(),
+         "utxo_commitment_index",
+     )?;
+ }

// Merge current deletions with the tip bitmap
let deleted_at_current_height = output_mmr.deleted().clone();
// Merge the new indexes with the blockchain deleted bitmap
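The core of this hunk is the two-tier leaf lookup: an input is first resolved against the confirmed UTXO set, and only on a miss is the output MMR being built for the current block searched, which is what lets a block spend outputs it creates itself. A compilable sketch of that control flow, with a HashMap and a slice standing in for the MMR-backed indexes (names are illustrative, not the LMDBDatabase API):

use std::collections::HashMap;

/// Resolve the output MMR leaf index an input spends, and whether the spend is 0-conf.
fn resolve_spend(
    confirmed_leaf_index: &HashMap<Vec<u8>, u32>, // stand-in for fetch_mmr_leaf_index
    base_leaf_count: u32,                         // leaves in the MMR before this block's outputs
    current_block_output_hashes: &[Vec<u8>],      // hashes pushed to output_mmr, in push order
    output_hash: &[u8],
) -> Result<(u32, bool), &'static str> {
    if let Some(&index) = confirmed_leaf_index.get(output_hash) {
        return Ok((index, false)); // normal case: spends a previously confirmed output
    }
    current_block_output_hashes
        .iter()
        .position(|h| h.as_slice() == output_hash)
        .map(|i| (base_leaf_count + i as u32, true)) // 0-conf: spends an output from this block
        .ok_or("UnspendableInput")
}

fn main() {
    let mut confirmed = HashMap::new();
    confirmed.insert(vec![1u8], 7u32);
    let block_outputs = vec![vec![2u8]];
    assert_eq!(resolve_spend(&confirmed, 10, &block_outputs, &[1]), Ok((7, false)));
    assert_eq!(resolve_spend(&confirmed, 10, &block_outputs, &[2]), Ok((10, true)));
    assert!(resolve_spend(&confirmed, 10, &block_outputs, &[3]).is_err());
}

Two details of the diff are worth noting: get_leaf_count() is taken before the outputs are pushed and each output carries the 1-based count leaf_count + i + 1, which insert_output converts back to a 0-based MMR position via mmr_count as u32 - 1; and the commitments of 0-conf spends are collected during the input pass but only deleted from utxo_commitment_index after the output pass has inserted them, so the delete always finds its key.
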
12 changes: 12 additions & 0 deletions base_layer/core/src/chain_storage/lmdb_db/mod.rs
@@ -42,6 +42,18 @@ pub(crate) struct TransactionOutputRowData {
pub mined_height: u64,
}

+ /// Transaction input row data taking references and used for serialization.
+ /// This struct must mirror the fields in `TransactionInputRowData`
+ #[derive(Serialize, Debug)]
+ pub(crate) struct TransactionInputRowDataRef<'a> {
+     pub input: &'a TransactionInput,
+     #[allow(clippy::ptr_arg)]
+     pub header_hash: &'a HashOutput,
+     pub mmr_position: u32,
+     #[allow(clippy::ptr_arg)]
+     pub hash: &'a HashOutput,
+ }

#[derive(Serialize, Deserialize, Debug)]
pub(crate) struct TransactionInputRowData {
pub input: TransactionInput,
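The Ref struct exists so insert_input can serialize a row from borrowed data instead of cloning the input, header hash, and hash into an owned TransactionInputRowData. The pattern only works if the borrowed struct serializes to exactly the bytes the owned struct deserializes from, which is why the comment insists the fields must mirror each other. A minimal sketch of the invariant, assuming a serde-compatible format (bincode here purely for illustration, with simplified field types):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct RowData {
    input: String,
    header_hash: Vec<u8>,
    mmr_position: u32,
}

/// Must mirror `RowData` field-for-field so both serialize identically.
#[derive(Serialize)]
struct RowDataRef<'a> {
    input: &'a String,
    header_hash: &'a Vec<u8>,
    mmr_position: u32,
}

fn main() {
    let input = "input-bytes".to_string();
    let header_hash = vec![0xAAu8; 4];
    let owned = RowData {
        input: input.clone(),
        header_hash: header_hash.clone(),
        mmr_position: 42,
    };
    let borrowed = RowDataRef {
        input: &input,
        header_hash: &header_hash,
        mmr_position: 42,
    };

    // Rows written through the borrowed struct read back into the owned one
    let bytes = bincode::serialize(&borrowed).unwrap();
    assert_eq!(bytes, bincode::serialize(&owned).unwrap());
    assert_eq!(bincode::deserialize::<RowData>(&bytes).unwrap(), owned);
}

Any drift between the two structs (field order, type, or count) would silently corrupt reads, since nothing ties them together at compile time.
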
117 changes: 14 additions & 103 deletions base_layer/core/src/chain_storage/tests/blockchain_database.rs
@@ -23,6 +23,7 @@
use std::sync::Arc;

use rand::rngs::OsRng;
+ use tari_common_types::types::PublicKey;
use tari_crypto::keys::PublicKey as PublicKeyTrait;
use tari_test_utils::unpack_enum;
use tari_utilities::Hashable;
@@ -39,7 +40,7 @@ use crate::{
transactions::{
tari_amount::T,
test_helpers::{schema_to_transaction, TransactionSchema},
- transaction::{OutputFeatures, Transaction, UnblindedOutput},
+ transaction::{OutputFeatures, OutputFlags, Transaction, UnblindedOutput},
},
txn_schema,
};
@@ -375,13 +376,11 @@ mod fetch_block_hashes_from_header_tip {
}

mod add_block {
- use tari_common_types::types::PublicKey;
+ use tari_utilities::hex::Hex;

use super::*;
- use crate::{transactions::transaction::OutputFlags, validation::ValidationError};

#[test]
- #[ignore = "broken after validator node merge"]
fn it_rejects_duplicate_commitments_in_the_utxo_set() {
let db = setup();
let (blocks, outputs) = add_many_chained_blocks(5, &db);
@@ -407,14 +406,12 @@
script: tari_crypto::script![Nop],
input_data: None,
}]);
+ let commitment_hex = txns[0].body.outputs()[0].commitment.to_hex();

let (block, _) = create_next_block(&db, &prev_block, txns);
let err = db.add_block(block.clone()).unwrap_err();
- unpack_enum!(
-     ChainStorageError::ValidationError {
-         source: ValidationError::ContainsTxO
-     } = err
- );
+ unpack_enum!(ChainStorageError::KeyExists { key, .. } = err);
+ assert_eq!(key, commitment_hex);
// Check rollback
let header = db.fetch_header(block.header.height).unwrap();
assert!(header.is_none());
@@ -481,91 +478,6 @@
let (block, _) = create_next_block(&db, prev_block, transactions);
db.add_block(block).unwrap().assert_added();
}

- #[test]
- #[ignore = "broken after validator node merge"]
- fn it_rejects_duplicate_mint_or_burn_transactions_per_unique_id() {
- let db = setup();
- let (blocks, outputs) = add_many_chained_blocks(1, &db);
-
- let prev_block = blocks.last().unwrap();
-
- let (_, asset_pk) = PublicKey::random_keypair(&mut OsRng);
- let unique_id = vec![1u8; 3];
- let features = OutputFeatures::for_minting(asset_pk.clone(), Default::default(), unique_id.clone(), None);
- let (txns, _) = schema_to_transaction(&[txn_schema!(
- from: vec![outputs[0].clone()],
- to: vec![10 * T, 10 * T],
- features: features
- )]);
-
- let (block, _) = create_next_block(&db, prev_block, txns);
- let err = db.add_block(block).unwrap_err();
-
- unpack_enum!(
- ChainStorageError::ValidationError {
- source: ValidationError::ContainsDuplicateUtxoUniqueID
- } = err
- );
-
- let features = OutputFeatures {
- flags: OutputFlags::BURN_NON_FUNGIBLE,
- parent_public_key: Some(asset_pk),
- unique_id: Some(unique_id),
- ..Default::default()
- };
- let (txns, _) = schema_to_transaction(&[txn_schema!(
- from: vec![outputs[0].clone()],
- to: vec![10 * T, 10 * T],
- features: features
- )]);
-
- let (block, _) = create_next_block(&db, prev_block, txns);
- let err = db.add_block(block).unwrap_err();
-
- unpack_enum!(
- ChainStorageError::ValidationError {
- source: ValidationError::ContainsDuplicateUtxoUniqueID
- } = err
- );
- }
-
- #[test]
- #[ignore = "broken after validator node merge"]
- fn it_rejects_duplicate_mint_or_burn_transactions_in_blockchain() {
- let db = setup();
- let (blocks, outputs) = add_many_chained_blocks(1, &db);
-
- let prev_block = blocks.last().unwrap();
-
- let (_, asset_pk) = PublicKey::random_keypair(&mut OsRng);
- let unique_id = vec![1u8; 3];
- let features = OutputFeatures::for_minting(asset_pk.clone(), Default::default(), unique_id.clone(), None);
- let (txns, outputs) = schema_to_transaction(&[txn_schema!(
- from: vec![outputs[0].clone()],
- to: vec![10 * T],
- features: features
- )]);
-
- let (block, _) = create_next_block(&db, prev_block, txns);
- db.add_block(block.clone()).unwrap().assert_added();
-
- let features = OutputFeatures::for_minting(asset_pk, Default::default(), unique_id, None);
- let (txns, _) = schema_to_transaction(&[txn_schema!(
- from: vec![outputs[0].clone()],
- to: vec![T],
- features: features
- )]);
-
- let (block, _) = create_next_block(&db, &block, txns);
- let err = db.add_block(block).unwrap_err();
-
- unpack_enum!(
- ChainStorageError::ValidationError {
- source: ValidationError::ContainsDuplicateUtxoUniqueID
- } = err
- );
- }
}
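
The rewritten duplicate-commitment test above captures a behavioural shift that comes with 0-conf support: the duplicate is rejected when the output row is written to the commitment index, so the error surfaces as ChainStorageError::KeyExists carrying the offending commitment in hex, rather than as a ContainsTxO validation error. A self-contained sketch of insert-time duplicate detection (a HashMap stands in for the LMDB table, and to_hex is a local helper, not the tari_utilities trait):

use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum ChainStorageError {
    KeyExists { table_name: &'static str, key: String },
}

fn to_hex(bytes: &[u8]) -> String {
    bytes.iter().map(|b| format!("{:02x}", b)).collect()
}

/// Insert-or-fail, mirroring how a duplicate commitment is rejected at write time.
fn insert_unique(
    table: &mut HashMap<Vec<u8>, Vec<u8>>,
    table_name: &'static str,
    key: &[u8],
    value: Vec<u8>,
) -> Result<(), ChainStorageError> {
    if table.contains_key(key) {
        return Err(ChainStorageError::KeyExists {
            table_name,
            key: to_hex(key),
        });
    }
    table.insert(key.to_vec(), value);
    Ok(())
}

fn main() {
    let mut utxo_commitment_index = HashMap::new();
    let commitment = vec![0x0au8, 0x0b];
    insert_unique(&mut utxo_commitment_index, "utxo_commitment_index", &commitment, vec![]).unwrap();
    let err = insert_unique(&mut utxo_commitment_index, "utxo_commitment_index", &commitment, vec![]).unwrap_err();
    assert_eq!(err, ChainStorageError::KeyExists {
        table_name: "utxo_commitment_index",
        key: "0a0b".to_string(),
    });
}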

mod get_stats {
@@ -583,14 +495,13 @@ mod fetch_total_size_stats {
use super::*;

#[test]
- #[ignore = "broken after validator node merge"]
fn it_measures_the_number_of_entries() {
let db = setup();
let _ = add_many_chained_blocks(2, &db);
let stats = db.fetch_total_size_stats().unwrap();
assert_eq!(
stats.sizes().iter().find(|s| s.name == "utxos_db").unwrap().num_entries,
- 2
+ 3
);
}
}
@@ -734,28 +645,29 @@ mod clear_all_pending_headers {
}

#[test]
- #[ignore = "broken after validator node merge"]
fn it_clears_headers_after_tip() {
let db = setup();
let _ = add_many_chained_blocks(2, &db);
let prev_block = db.fetch_block(2).unwrap();
let mut prev_accum = prev_block.accumulated_data.clone();
- let mut prev_block = Arc::new(prev_block.try_into_block().unwrap());
+ let mut prev_header = prev_block.try_into_chain_block().unwrap().to_chain_header();
let headers = (0..5)
.map(|_| {
- let (block, _) = create_next_block(&db, &prev_block, vec![]);
+ let mut header = BlockHeader::from_previous(prev_header.header());
+ header.kernel_mmr_size += 1;
+ header.output_mmr_size += 1;
let accum = BlockHeaderAccumulatedData::builder(&prev_accum)
- .with_hash(block.hash())
+ .with_hash(header.hash())
.with_achieved_target_difficulty(
AchievedTargetDifficulty::try_construct(PowAlgorithm::Sha3, 0.into(), 0.into()).unwrap(),
)
.with_total_kernel_offset(Default::default())
.build()
.unwrap();

- let header = ChainHeader::try_construct(block.header.clone(), accum.clone()).unwrap();
+ let header = ChainHeader::try_construct(header, accum.clone()).unwrap();

- prev_block = block;
+ prev_header = header.clone();
prev_accum = accum;
header
})
@@ -786,7 +698,6 @@ mod fetch_utxo_by_unique_id {
}

#[test]
- #[ignore = "broken after validator node merge"]
fn it_finds_the_utxo_by_unique_id_at_deleted_height() {
let db = setup();
let unique_id = vec![1u8; 3];