feat(epoch-sync): adding EpochSyncInfo column and its continuous population (#9440)

Adds the EpochSyncInfo column and populates it at the end of each epoch during block postprocessing.
posvyatokum committed Aug 16, 2023
1 parent da3ec0b commit a5238cc
Showing 14 changed files with 237 additions and 4 deletions.
1 change: 1 addition & 0 deletions chain/chain/Cargo.toml
@@ -48,6 +48,7 @@ expensive_tests = []
test_features = []
delay_detector = ["delay-detector/delay_detector"]
no_cache = ["near-store/no_cache"]
new_epoch_sync = ["near-store/new_epoch_sync", "near-primitives/new_epoch_sync"]

protocol_feature_reject_blocks_with_outdated_protocol_version = [
"near-primitives/protocol_feature_reject_blocks_with_outdated_protocol_version",
73 changes: 73 additions & 0 deletions chain/chain/src/chain.rs
@@ -37,6 +37,11 @@ use near_primitives::challenge::{
MaybeEncodedShardChunk, PartialState, SlashedValidator,
};
use near_primitives::checked_feature;
#[cfg(feature = "new_epoch_sync")]
use near_primitives::epoch_manager::{
block_info::BlockInfo,
epoch_sync::{BlockHeaderPair, EpochSyncInfo},
};
use near_primitives::errors::EpochError;
use near_primitives::hash::{hash, CryptoHash};
use near_primitives::merkle::{
@@ -5311,6 +5316,13 @@ impl<'a> ChainUpdate<'a> {
.add_validator_proposals(BlockHeaderInfo::new(block.header(), last_finalized_height))?;
self.chain_store_update.merge(epoch_manager_update);

#[cfg(feature = "new_epoch_sync")]
{
// BlockInfo should already be recorded in the epoch_manager cache
let block_info = self.epoch_manager.get_block_info(block.hash())?;
self.save_epoch_sync_info(block.header().epoch_id(), block.header(), &block_info)?;
}

// Add validated block to the db, even if it's not the canonical fork.
self.chain_store_update.save_block(block.clone());
self.chain_store_update.inc_block_refcount(prev_hash)?;
@@ -5687,6 +5699,67 @@ impl<'a> ChainUpdate<'a> {
self.chain_store_update.save_chunk_extra(block_header.hash(), &shard_uid, new_chunk_extra);
Ok(true)
}

/// If the block is the last one in the epoch,
/// construct and record `EpochSyncInfo` in `self.chain_store_update`.
#[cfg(feature = "new_epoch_sync")]
fn save_epoch_sync_info(
&mut self,
epoch_id: &EpochId,
last_block_header: &BlockHeader,
last_block_info: &BlockInfo,
) -> Result<(), Error> {
if self.epoch_manager.is_next_block_epoch_start(last_block_header.hash())? {
let mut store_update = self.chain_store_update.store().store_update();
store_update
.set_ser(
DBCol::EpochSyncInfo,
epoch_id.as_ref(),
&self.create_epoch_sync_info(last_block_header, last_block_info)?,
)
.map_err(EpochError::from)?;
self.chain_store_update.merge(store_update);
}
Ok(())
}

/// Create the pair of `BlockHeader`s for `block_hash` needed to build `EpochSyncInfo`:
/// the header itself and its last finalised header.
#[cfg(feature = "new_epoch_sync")]
fn get_header_pair(&self, block_hash: &CryptoHash) -> Result<BlockHeaderPair, Error> {
let header = self.chain_store_update.get_block_header(block_hash)?;
// `block_hash` can correspond to the genesis block, for which there is no last final block recorded,
// because `last_final_block` for genesis is `CryptoHash::default()`.
// In that case we return the genesis block header itself as the last finalised header.
// TODO(posvyatokum): handle this case carefully in epoch sync validation.
// TODO(posvyatokum): handle this case carefully when saving the parts of epoch sync data.
let last_finalised_header = {
if *header.last_final_block() == CryptoHash::default() {
header.clone()
} else {
self.chain_store_update.get_block_header(header.last_final_block())?
}
};
Ok(BlockHeaderPair { header, last_finalised_header })
}

/// Create `EpochSyncInfo`, the data necessary to prove the epoch in new Epoch Sync.
#[cfg(feature = "new_epoch_sync")]
fn create_epoch_sync_info(
&self,
last_block_header: &BlockHeader,
last_block_info: &BlockInfo,
) -> Result<EpochSyncInfo, Error> {
let last = self.get_header_pair(last_block_header.hash())?;
let prev_last = self.get_header_pair(last_block_header.prev_hash())?;
let first = self.get_header_pair(last_block_info.epoch_first_block())?;
let epoch_info = self.epoch_manager.get_epoch_info(last_block_info.epoch_id())?;
Ok(EpochSyncInfo {
last,
prev_last,
first,
block_producers: epoch_info.validators_iter().collect(),
})
}
}

pub fn do_apply_chunks(
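
The write path above amounts to serializing the freshly built EpochSyncInfo with `set_ser` under the new `DBCol::EpochSyncInfo` column, keyed by the epoch id, and merging that update into the block's store update. A minimal sketch of the same column round-trip, assuming a build with the `new_epoch_sync` feature and the `near-store` test helper `create_test_store` (neither shown in this diff), with a stand-in `u64` value instead of a real `EpochSyncInfo`:

use near_store::{test_utils::create_test_store, DBCol};

fn epoch_sync_info_column_roundtrip() -> std::io::Result<()> {
    let store = create_test_store();

    // Write side: in chain.rs the key is `epoch_id.as_ref()` and the value is
    // the freshly built `EpochSyncInfo`; a byte-literal key and a `u64` stand in here.
    let mut update = store.store_update();
    update.set_ser(DBCol::EpochSyncInfo, b"epoch-id-bytes", &42u64)?;
    update.commit()?;

    // Read side: store.rs wraps this lookup in `get_epoch_sync_info`, turning a
    // missing entry into a not-found error via `option_to_not_found`.
    let value: Option<u64> = store.get_ser(DBCol::EpochSyncInfo, b"epoch-id-bytes")?;
    assert_eq!(value, Some(42));
    Ok(())
}
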
18 changes: 15 additions & 3 deletions chain/chain/src/store.rs
@@ -11,6 +11,8 @@ use near_epoch_manager::EpochManagerAdapter;
use near_primitives::block::Tip;
#[cfg(feature = "protocol_feature_simple_nightshade_v2")]
use near_primitives::checked_feature;
#[cfg(feature = "new_epoch_sync")]
use near_primitives::epoch_manager::epoch_sync::EpochSyncInfo;
use near_primitives::errors::InvalidTxError;
use near_primitives::hash::CryptoHash;
use near_primitives::merkle::{MerklePath, PartialMerkleTree};
@@ -822,6 +824,15 @@ impl ChainStore {
store_update.commit().map_err(|err| err.into())
}

/// Retrieve `EpochSyncInfo` for the given epoch from the store.
#[cfg(feature = "new_epoch_sync")]
pub fn get_epoch_sync_info(&self, epoch_id: &EpochId) -> Result<EpochSyncInfo, Error> {
option_to_not_found(
self.store.get_ser(DBCol::EpochSyncInfo, epoch_id.as_ref()),
"EpochSyncInfo",
)
}

/// Retrieve the kinds of state changes occurred in a given block.
///
/// We store different types of data, so we prefer to only expose minimal information about the
@@ -2706,9 +2717,10 @@ impl<'a> ChainStoreUpdate<'a> {
| DBCol::FlatStateChanges
| DBCol::FlatStateDeltaMetadata
| DBCol::FlatStorageStatus
| DBCol::Misc => unreachable!(),
#[cfg(feature = "new_epoch_sync")]
DBCol::EpochSyncInfo => unreachable!(),
}
self.merge(store_update);
}
3 changes: 3 additions & 0 deletions chain/client/Cargo.toml
@@ -109,3 +109,6 @@ sandbox = [
"near-client-primitives/sandbox",
"near-chain/sandbox",
]
new_epoch_sync = [
"near-chain/new_epoch_sync"
]
4 changes: 4 additions & 0 deletions chain/epoch-manager/Cargo.toml
@@ -43,3 +43,7 @@ nightly_protocol = [
"near-store/nightly_protocol",
]
no_cache = []
new_epoch_sync = [
"near-store/new_epoch_sync",
"near-primitives/new_epoch_sync"
]
1 change: 1 addition & 0 deletions core/primitives/Cargo.toml
@@ -65,6 +65,7 @@ nightly_protocol = [
"near-primitives-core/nightly_protocol",
"near-vm-runner/nightly_protocol",
]
new_epoch_sync = []


calimero_zero_storage = []
23 changes: 23 additions & 0 deletions core/primitives/src/epoch_manager.rs
@@ -1000,3 +1000,26 @@ pub enum SlashState {
/// All other cases (tokens should be entirely slashed),
Other,
}

#[cfg(feature = "new_epoch_sync")]
pub mod epoch_sync {
use crate::block_header::BlockHeader;
use crate::types::validator_stake::ValidatorStake;
use borsh::{BorshDeserialize, BorshSerialize};

#[derive(BorshSerialize, BorshDeserialize)]
pub struct BlockHeaderPair {
pub header: BlockHeader,
pub last_finalised_header: BlockHeader,
}

/// Struct to keep all the info that is transferred for one epoch during Epoch Sync.
#[derive(BorshSerialize, BorshDeserialize)]
pub struct EpochSyncInfo {
/// None is only used for corner case of the first epoch
pub first: BlockHeaderPair,
pub last: BlockHeaderPair,
pub prev_last: BlockHeaderPair,
pub block_producers: Vec<ValidatorStake>,
}
}
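
Both structs above only derive Borsh (de)serialization, which is what the `set_ser`/`get_ser` calls elsewhere in this commit rely on. A rough sketch of that round-trip with a hypothetical stand-in struct (not the real `EpochSyncInfo`), assuming a pre-1.0 borsh where `try_to_vec` is available, as nearcore used at the time:

use borsh::{BorshDeserialize, BorshSerialize};

#[derive(BorshSerialize, BorshDeserialize, Debug, PartialEq)]
struct HeaderPairSketch {
    header: Vec<u8>,
    last_finalised_header: Vec<u8>,
}

fn main() -> std::io::Result<()> {
    let pair = HeaderPairSketch { header: vec![1, 2, 3], last_finalised_header: vec![4, 5] };
    // Roughly what `StoreUpdate::set_ser` does before writing the value bytes.
    let bytes = pair.try_to_vec()?;
    // Roughly what `Store::get_ser` does after reading them back.
    let decoded = HeaderPairSketch::try_from_slice(&bytes)?;
    assert_eq!(pair, decoded);
    Ok(())
}
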
1 change: 1 addition & 0 deletions core/store/Cargo.toml
@@ -66,6 +66,7 @@ no_cache = []
single_thread_rocksdb = [] # Deactivate RocksDB IO background threads
test_features = []
serialize_all_state_changes = []
new_epoch_sync = []

nightly_protocol = [
"near-chain-configs/nightly_protocol",
12 changes: 11 additions & 1 deletion core/store/src/columns.rs
@@ -277,6 +277,12 @@ pub enum DBCol {
/// - *Rows*: arbitrary string, see `crate::db::FLAT_STATE_VALUES_INLINING_MIGRATION_STATUS_KEY` for example
/// - *Column type*: arbitrary bytes
Misc,
/// Column to store data for Epoch Sync.
/// Does not contain data for the genesis epoch.
/// - *Rows*: `epoch_id`
/// - *Column type*: `EpochSyncInfo`
#[cfg(feature = "new_epoch_sync")]
EpochSyncInfo,
}

/// Defines different logical parts of a db key.
@@ -470,7 +476,9 @@ impl DBCol {
| DBCol::FlatState
| DBCol::FlatStateChanges
| DBCol::FlatStateDeltaMetadata
| DBCol::FlatStorageStatus => false,
#[cfg(feature = "new_epoch_sync")]
DBCol::EpochSyncInfo => false,
}
}

@@ -539,6 +547,8 @@ impl DBCol {
DBCol::FlatStateChanges => &[DBKeyType::ShardUId, DBKeyType::BlockHash],
DBCol::FlatStateDeltaMetadata => &[DBKeyType::ShardUId, DBKeyType::BlockHash],
DBCol::FlatStorageStatus => &[DBKeyType::ShardUId],
#[cfg(feature = "new_epoch_sync")]
DBCol::EpochSyncInfo => &[DBKeyType::EpochId],
}
}
}
1 change: 1 addition & 0 deletions integration-tests/Cargo.toml
@@ -138,3 +138,4 @@ nightly_protocol = [
sandbox = ["near-chain/sandbox", "node-runtime/sandbox", "near-client/sandbox"]
no_cache = ["nearcore/no_cache"]
calimero_zero_storage = []
new_epoch_sync = ["nearcore/new_epoch_sync"]
98 changes: 98 additions & 0 deletions integration-tests/src/tests/client/epoch_sync.rs
@@ -0,0 +1,98 @@
use super::utils::TestEnvNightshadeSetupExt;
use near_chain::{ChainGenesis, Provenance};
use near_chain_configs::Genesis;
use near_client::test_utils::TestEnv;
use near_client::ProcessTxResponse;
use near_crypto::{InMemorySigner, KeyType};
use near_o11y::testonly::init_test_logger;
use near_primitives::transaction::{
Action, DeployContractAction, FunctionCallAction, SignedTransaction,
};
use near_primitives_core::hash::CryptoHash;
use near_primitives_core::types::BlockHeight;
use nearcore::config::GenesisExt;

fn generate_transactions(last_hash: &CryptoHash, h: BlockHeight) -> Vec<SignedTransaction> {
let mut txs = vec![];
let signer = InMemorySigner::from_seed("test0".parse().unwrap(), KeyType::ED25519, "test0");
if h == 1 {
txs.push(SignedTransaction::from_actions(
h,
"test0".parse().unwrap(),
"test0".parse().unwrap(),
&signer,
vec![Action::DeployContract(DeployContractAction {
code: near_test_contracts::rs_contract().to_vec(),
})],
last_hash.clone(),
));
}

for i in 0..5 {
txs.push(SignedTransaction::from_actions(
h * 10 + i,
"test0".parse().unwrap(),
"test0".parse().unwrap(),
&signer,
vec![Action::FunctionCall(FunctionCallAction {
method_name: "write_random_value".to_string(),
args: vec![],
gas: 100_000_000_000_000,
deposit: 0,
})],
last_hash.clone(),
));
}

for i in 0..5 {
txs.push(SignedTransaction::send_money(
h * 10 + i,
"test0".parse().unwrap(),
"test1".parse().unwrap(),
&signer,
1,
last_hash.clone(),
));
}
txs
}

/// Produce 4 epochs with some transactions.
/// At the end of each epoch check that `EpochSyncInfo` has been recorded.
#[test]
fn test_continuous_epoch_sync_info_population() {
init_test_logger();

let epoch_length = 5;
let max_height = epoch_length * 4 + 1;

let mut genesis = Genesis::test(vec!["test0".parse().unwrap(), "test1".parse().unwrap()], 1);

genesis.config.epoch_length = epoch_length;
let mut chain_genesis = ChainGenesis::test();
chain_genesis.epoch_length = epoch_length;
let mut env = TestEnv::builder(chain_genesis)
.real_epoch_managers(&genesis.config)
.nightshade_runtimes(&genesis)
.build();

let mut last_hash = *env.clients[0].chain.genesis().hash();

for h in 1..max_height {
for tx in generate_transactions(&last_hash, h) {
assert_eq!(env.clients[0].process_tx(tx, false, false), ProcessTxResponse::ValidTx);
}

let block = env.clients[0].produce_block(h).unwrap().unwrap();
env.process_block(0, block.clone(), Provenance::PRODUCED);
last_hash = *block.hash();

if env.clients[0].epoch_manager.is_next_block_epoch_start(&last_hash).unwrap() {
let epoch_id = block.header().epoch_id().clone();

tracing::debug!("Checking epoch: {:?}", &epoch_id);
assert!(env.clients[0].chain.store().get_epoch_sync_info(&epoch_id).is_ok());
tracing::debug!("OK");
}
}
}
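
Note that this test module is gated behind the `new_epoch_sync` feature (see the `mod.rs` change below), so it only builds and runs when the integration-tests crate is compiled with that cargo feature enabled, e.g. by passing `--features new_epoch_sync`.
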
2 changes: 2 additions & 0 deletions integration-tests/src/tests/client/mod.rs
@@ -2,6 +2,8 @@ mod benchmarks;
mod challenges;
mod chunks_management;
mod cold_storage;
#[cfg(feature = "new_epoch_sync")]
mod epoch_sync;
mod features;
mod flat_storage;
mod process_blocks;
3 changes: 3 additions & 0 deletions nearcore/Cargo.toml
@@ -118,6 +118,9 @@ protocol_feature_fix_contract_loading_cost = [
protocol_feature_simple_nightshade_v2 = [
"near-primitives/protocol_feature_simple_nightshade_v2",
]
new_epoch_sync = [
"near-client/new_epoch_sync"
]

serialize_all_state_changes = ["near-store/serialize_all_state_changes"]
nightly = [
1 change: 1 addition & 0 deletions neard/Cargo.toml
@@ -72,6 +72,7 @@ json_rpc = ["nearcore/json_rpc"]
protocol_feature_fix_staking_threshold = ["nearcore/protocol_feature_fix_staking_threshold"]
protocol_feature_simple_nightshade_v2 = ["nearcore/protocol_feature_simple_nightshade_v2"]
serialize_all_state_changes = ["nearcore/serialize_all_state_changes"]
new_epoch_sync = ["nearcore/new_epoch_sync"]

nightly = [
"nightly_protocol",
