From dc0cc41ff91abd366fd969fa85f432afe43b6805 Mon Sep 17 00:00:00 2001 From: Shanin Roman Date: Thu, 2 Nov 2023 17:49:02 +0300 Subject: [PATCH] [refactor] #2664: Introduce new wsv Signed-off-by: Shanin Roman --- Cargo.lock | 55 +- Cargo.toml | 2 + cli/src/lib.rs | 18 +- cli/src/torii/mod.rs | 2 + cli/src/torii/routing.rs | 81 +- client/benches/tps/utils.rs | 4 +- config/src/iroha.rs | 2 +- config/src/wsv.rs | 4 +- core/Cargo.toml | 1 + core/benches/blocks/apply_blocks.rs | 37 +- core/benches/blocks/common.rs | 23 +- core/benches/blocks/validate_blocks.rs | 26 +- core/benches/kura.rs | 15 +- core/benches/validation.rs | 19 +- core/src/block.rs | 24 +- core/src/block_sync.rs | 32 +- core/src/executor.rs | 12 +- core/src/gossiper.rs | 27 +- core/src/lib.rs | 2 +- core/src/queue.rs | 113 +- core/src/smartcontracts/isi/account.rs | 296 ++- core/src/smartcontracts/isi/asset.rs | 162 +- core/src/smartcontracts/isi/block.rs | 7 +- core/src/smartcontracts/isi/domain.rs | 187 +- core/src/smartcontracts/isi/mod.rs | 183 +- core/src/smartcontracts/isi/query.rs | 166 +- core/src/smartcontracts/isi/triggers/mod.rs | 78 +- core/src/smartcontracts/isi/triggers/set.rs | 2 +- core/src/smartcontracts/isi/tx.rs | 10 +- core/src/smartcontracts/isi/world.rs | 139 +- core/src/smartcontracts/mod.rs | 34 +- core/src/smartcontracts/wasm.rs | 454 ++-- core/src/snapshot.rs | 87 +- core/src/sumeragi/main_loop.rs | 381 ++- core/src/sumeragi/mod.rs | 162 +- core/src/tx.rs | 33 +- core/src/wsv.rs | 2342 ++++++++++------- telemetry/derive/src/lib.rs | 23 +- .../derive/tests/ui_fail/args_no_wsv.stderr | 2 +- telemetry/derive/tests/ui_fail/bare_spec.rs | 2 +- .../derive/tests/ui_fail/doubled_plus.rs | 2 +- telemetry/derive/tests/ui_fail/no_args.stderr | 2 +- .../tests/ui_fail/non_snake_case_name.rs | 2 +- telemetry/derive/tests/ui_fail/not_execute.rs | 2 +- .../derive/tests/ui_fail/not_execute.stderr | 2 +- .../derive/tests/ui_fail/not_return_result.rs | 2 +- 
.../tests/ui_fail/not_return_result.stderr | 6 +- .../derive/tests/ui_fail/return_nothing.rs | 2 +- .../derive/tests/ui_fail/trailing_plus.rs | 2 +- 49 files changed, 3091 insertions(+), 2180 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2ae1a1f27af..25e1ffb4efd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,6 +33,17 @@ dependencies = [ "generic-array 0.14.7", ] +[[package]] +name = "ahash" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "version_check", +] + [[package]] name = "ahash" version = "0.8.3" @@ -729,6 +740,22 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "concread" +version = "0.4.3" +source = "git+https://github.com/kanidm/concread.git?branch=master#862788aca68fe75ff82ff5c9ff43e1f454ac4cd5" +dependencies = [ + "ahash 0.7.7", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", + "lru", + "smallvec", + "sptr", + "tokio", + "tracing", +] + [[package]] name = "console" version = "0.15.7" @@ -2320,6 +2347,9 @@ name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.7", +] [[package]] name = "hashbrown" @@ -2327,7 +2357,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash", + "ahash 0.8.3", ] [[package]] @@ -2336,7 +2366,7 @@ version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" dependencies = [ - "ahash", + "ahash 0.8.3", ] [[package]] @@ -2824,6 +2854,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", + "storage", "tempfile", "thiserror", 
"tokio", @@ -3616,6 +3647,15 @@ version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +[[package]] +name = "lru" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a" +dependencies = [ + "hashbrown 0.12.3", +] + [[package]] name = "mach" version = "0.3.2" @@ -5184,6 +5224,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "storage" +version = "0.1.0" +source = "git+https://github.com/Erigara/storage.git?branch=main#d0ed7cbcabd19669261be241de676e6212abec11" +dependencies = [ + "concread", + "serde", +] + [[package]] name = "streaming-stats" version = "0.2.3" @@ -5720,7 +5769,7 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5c266b9ac83dedf0e0385ad78514949e6d89491269e7065bee51d2bb8ec7373" dependencies = [ - "ahash", + "ahash 0.8.3", "gethostname", "log", "serde", diff --git a/Cargo.toml b/Cargo.toml index 26eb80f2ff9..ff1e7761fd4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -134,6 +134,8 @@ serde_with = { version = "3.3.0", default-features = false } parity-scale-codec = { version = "3.6.5", default-features = false } json5 = "0.4.1" +storage = { git = "https://github.com/Erigara/storage.git", branch="main" } + [workspace.lints] rust.anonymous_parameters = "deny" rust.future_incompatible = "deny" diff --git a/cli/src/lib.rs b/cli/src/lib.rs index ba2586bc652..af6fc8c3ada 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -19,13 +19,13 @@ use iroha_core::{ gossiper::{TransactionGossiper, TransactionGossiperHandle}, handler::ThreadHandler, kura::Kura, - prelude::{World, WorldStateView}, query::store::LiveQueryStore, queue::Queue, smartcontracts::isi::Registrable as _, 
snapshot::{try_read_snapshot, SnapshotMaker, SnapshotMakerHandle}, sumeragi::{SumeragiHandle, SumeragiStartArgs}, tx::PeerId, + wsv::{State, World}, IrohaNetwork, }; use iroha_data_model::prelude::*; @@ -95,6 +95,8 @@ pub struct Iroha { pub torii: Option, /// Snapshot service pub snapshot_maker: SnapshotMakerHandle, + /// State of blockchain + pub wsv: Arc, /// Thread handlers thread_handlers: Vec, @@ -256,7 +258,7 @@ impl Iroha { .map_or_else( |error| { iroha_logger::warn!(%error, "Failed to load wsv from snapshot, creating empty wsv"); - WorldStateView::from_configuration( + State::from_configuration( *config.wsv, world, Arc::clone(&kura), @@ -265,12 +267,13 @@ impl Iroha { }, |wsv| { iroha_logger::info!( - at_height = wsv.height(), + at_height = wsv.view().height(), "Successfully loaded wsv from snapshot" ); wsv }, ); + let wsv = Arc::new(wsv); let queue = Arc::new(Queue::from_configuration(&config.queue)); if Self::start_telemetry(telemetry, &config).await? { @@ -284,7 +287,7 @@ impl Iroha { let sumeragi = SumeragiHandle::start(SumeragiStartArgs { configuration: &config.sumeragi, events_sender: events_sender.clone(), - wsv, + wsv: Arc::clone(&wsv), queue: Arc::clone(&queue), kura: Arc::clone(&kura), network: network.clone(), @@ -298,6 +301,7 @@ impl Iroha { Arc::clone(&kura), PeerId::new(&config.torii.p2p_addr, &config.public_key), network.clone(), + Arc::clone(&wsv), ) .start(); @@ -305,7 +309,7 @@ impl Iroha { &config.sumeragi, network.clone(), Arc::clone(&queue), - sumeragi.clone(), + Arc::clone(&wsv), ) .start(); @@ -326,7 +330,7 @@ impl Iroha { .start(); let snapshot_maker = - SnapshotMaker::from_configuration(&config.snapshot, sumeragi.clone()).start(); + SnapshotMaker::from_configuration(&config.snapshot, Arc::clone(&wsv)).start(); let torii = Torii::from_configuration( config.clone(), @@ -336,6 +340,7 @@ impl Iroha { sumeragi.clone(), live_query_store_handle, Arc::clone(&kura), + Arc::clone(&wsv), ); 
Self::start_listening_signal(Arc::clone(¬ify_shutdown))?; @@ -349,6 +354,7 @@ impl Iroha { kura, torii, snapshot_maker, + wsv, thread_handlers: vec![kura_thread_handler], #[cfg(debug_assertions)] freeze_status, diff --git a/cli/src/torii/mod.rs b/cli/src/torii/mod.rs index 9594362ab5d..cdd9cb14285 100644 --- a/cli/src/torii/mod.rs +++ b/cli/src/torii/mod.rs @@ -16,6 +16,7 @@ use iroha_core::{ query::store::LiveQueryStoreHandle, queue::{self, Queue}, sumeragi::SumeragiHandle, + wsv::State, EventsSender, }; use tokio::sync::Notify; @@ -40,6 +41,7 @@ pub struct Torii { sumeragi: SumeragiHandle, query_service: LiveQueryStoreHandle, kura: Arc, + wsv: Arc, } /// Torii errors. diff --git a/cli/src/torii/routing.rs b/cli/src/torii/routing.rs index 48330bdffb9..1496d2fee04 100644 --- a/cli/src/torii/routing.rs +++ b/cli/src/torii/routing.rs @@ -81,10 +81,10 @@ fn fetch_size() -> impl warp::Filter, - sumeragi: SumeragiHandle, + wsv: Arc, transaction: SignedTransaction, ) -> Result { - let wsv = sumeragi.wsv_clone(); + let wsv = wsv.view(); let transaction_limits = wsv.config.transaction_limits; let transaction = AcceptedTransaction::accept(transaction, &transaction_limits) .map_err(Error::AcceptTransaction)?; @@ -105,26 +105,29 @@ async fn handle_instructions( #[iroha_futures::telemetry_future] async fn handle_queries( live_query_store: LiveQueryStoreHandle, - sumeragi: SumeragiHandle, - + wsv: Arc, query_request: http::ClientQueryRequest, ) -> Result>> { - let handle = tokio::task::spawn_blocking(move || match query_request.0 { - QueryRequest::Query(QueryWithParameters { - query: signed_query, - sorting, - pagination, - fetch_size, - }) => sumeragi.apply_wsv(|wsv| { - let valid_query = ValidQueryRequest::validate(signed_query, wsv)?; - let query_output = valid_query.execute(wsv)?; - live_query_store - .handle_query_output(query_output, &sorting, pagination, fetch_size) - .map_err(ValidationFail::from) - }), - QueryRequest::Cursor(cursor) => live_query_store - 
.handle_query_cursor(cursor) - .map_err(ValidationFail::from), + let handle = tokio::task::spawn_blocking(move || { + let wsv = wsv.view(); + let wsv_snapshot = wsv.to_snapshot(); + match query_request.0 { + QueryRequest::Query(QueryWithParameters { + query: signed_query, + sorting, + pagination, + fetch_size, + }) => { + let valid_query = ValidQueryRequest::validate(signed_query, &wsv_snapshot)?; + let query_output = valid_query.execute(&wsv_snapshot)?; + live_query_store + .handle_query_output(query_output, &sorting, pagination, fetch_size) + .map_err(ValidationFail::from) + } + QueryRequest::Cursor(cursor) => live_query_store + .handle_query_cursor(cursor) + .map_err(ValidationFail::from), + } }); handle .await @@ -152,18 +155,17 @@ async fn handle_schema() -> Json { #[iroha_futures::telemetry_future] async fn handle_pending_transactions( queue: Arc, - sumeragi: SumeragiHandle, + wsv: Arc, pagination: Pagination, ) -> Result>> { - let query_response = sumeragi.apply_wsv(|wsv| { - queue - .all_transactions(wsv) - .map(Into::into) - .paginate(pagination) - .collect::>() - // TODO: - //.batched(fetch_size) - }); + let wsv = wsv.view(); + let query_response = queue + .all_transactions(&wsv) + .map(Into::into) + .paginate(pagination) + .collect::>(); + // TODO: + //.batched(fetch_size) Ok(Scale(query_response)) } @@ -332,11 +334,12 @@ mod subscription { #[iroha_futures::telemetry_future] #[cfg(feature = "telemetry")] -async fn handle_version(sumeragi: SumeragiHandle) -> Json { +async fn handle_version(wsv: Arc) -> Json { use iroha_version::Version; - let string = sumeragi - .apply_wsv(WorldStateView::latest_block_ref) + let wsv = wsv.view(); + let string = wsv + .latest_block_ref() .expect("Genesis not applied. Nothing we can do. 
Solve the issue and rerun.") .version() .to_string(); @@ -411,6 +414,7 @@ impl Torii { sumeragi: SumeragiHandle, query_service: LiveQueryStoreHandle, kura: Arc, + wsv: Arc, ) -> Self { Self { iroha_cfg, @@ -420,6 +424,7 @@ impl Torii { sumeragi, query_service, kura, + wsv, } } @@ -434,7 +439,7 @@ impl Torii { endpoint3( handle_pending_transactions, warp::path(uri::PENDING_TRANSACTIONS) - .and(add_state!(self.queue, self.sumeragi,)) + .and(add_state!(self.queue, self.wsv.clone(),)) .and(paginate()), ) .or(endpoint2( @@ -458,8 +463,8 @@ impl Torii { Ok::<_, Infallible>(WarpResult(handle_metrics(&sumeragi))) }); let get_api_version = warp::path(uri::API_VERSION) - .and(add_state!(self.sumeragi.clone())) - .and_then(|sumeragi| async { Ok::<_, Infallible>(handle_version(sumeragi).await) }); + .and(add_state!(self.wsv.clone())) + .and_then(|wsv| async { Ok::<_, Infallible>(handle_version(wsv).await) }); #[cfg(feature = "telemetry")] let get_router = get_router.or(warp::any() @@ -476,7 +481,7 @@ impl Torii { endpoint3( handle_instructions, warp::path(uri::TRANSACTION) - .and(add_state!(self.queue, self.sumeragi)) + .and(add_state!(self.queue, self.wsv.clone())) .and(warp::body::content_length_limit( self.iroha_cfg.torii.max_content_len.into(), )) @@ -485,7 +490,7 @@ impl Torii { .or(endpoint3( handle_queries, warp::path(uri::QUERY) - .and(add_state!(self.query_service, self.sumeragi,)) + .and(add_state!(self.query_service, self.wsv.clone(),)) .and(client_query_request()), )) .or(endpoint2( diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index a209c90334a..762ac077917 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -119,8 +119,8 @@ impl Config { .iroha .as_ref() .expect("Must be some") - .sumeragi - .wsv_clone(); + .wsv + .view(); let mut blocks = blocks_wsv.all_blocks().skip(blocks_out_of_measure as usize); let (txs_accepted, txs_rejected) = (0..self.blocks) .map(|_| { diff --git a/config/src/iroha.rs 
b/config/src/iroha.rs index cd11b80812e..a4818ebfd7f 100644 --- a/config/src/iroha.rs +++ b/config/src/iroha.rs @@ -45,7 +45,7 @@ view! { #[config(inner)] #[view(into = Box)] pub genesis: Box, - /// `WorldStateView` configuration + /// `State` configuration #[config(inner)] pub wsv: Box, /// Network configuration diff --git a/config/src/wsv.rs b/config/src/wsv.rs index aacc58734be..a7516dc7734 100644 --- a/config/src/wsv.rs +++ b/config/src/wsv.rs @@ -1,4 +1,4 @@ -//! Module for `WorldStateView`-related configuration and structs. +//! Module for `State`-related configuration and structs. use default::*; use iroha_config_base::derive::{Documented, Proxy}; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; @@ -25,7 +25,7 @@ pub mod default { TransactionLimits::new(DEFAULT_MAX_INSTRUCTION_NUMBER, DEFAULT_MAX_WASM_SIZE_BYTES); } -/// `WorldStateView` configuration. +/// `State` configuration. #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Proxy, Documented)] #[config(env_prefix = "WSV_")] #[serde(rename_all = "UPPERCASE")] diff --git a/core/Cargo.toml b/core/Cargo.toml index 73a9f5c63b8..c77af1bbaf5 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -52,6 +52,7 @@ iroha_telemetry = { workspace = true } iroha_primitives = { workspace = true } iroha_genesis = { workspace = true } iroha_wasm_codec = { workspace = true } +storage = { workspace = true, features = ["serde"] } async-trait = { workspace = true } dashmap = { workspace = true } diff --git a/core/benches/blocks/apply_blocks.rs b/core/benches/blocks/apply_blocks.rs index 6a996a4d9e1..809f34d8926 100644 --- a/core/benches/blocks/apply_blocks.rs +++ b/core/benches/blocks/apply_blocks.rs @@ -1,5 +1,5 @@ use eyre::Result; -use iroha_core::{block::CommittedBlock, prelude::*}; +use iroha_core::{block::CommittedBlock, prelude::*, wsv::State}; use iroha_data_model::prelude::*; #[path = "./common.rs"] @@ -8,12 +8,12 @@ mod common; use common::*; pub struct WsvApplyBlocks { - wsv: 
WorldStateView, + wsv: State, blocks: Vec, } impl WsvApplyBlocks { - /// Create [`WorldStateView`] and blocks for benchmarking + /// Create [`State`] and blocks for benchmarking /// /// # Errors /// - Failed to parse [`AccountId`] @@ -35,14 +35,21 @@ impl WsvApplyBlocks { ]; let blocks = { - // Clone wsv because it will be changed during creation of block - let mut wsv = wsv.clone(); + // Create empty wsv because it will be changed during creation of block + let wsv = build_wsv(&account_id, &key_pair); instructions .into_iter() - .map(|instructions| { - let block = - create_block(&mut wsv, instructions, account_id.clone(), key_pair.clone()); - wsv.apply_without_execution(&block).map(|_| block) + .map(|instructions| -> Result<_> { + let mut wsv_block = wsv.block(false); + let block = create_block( + &mut wsv_block, + instructions, + account_id.clone(), + key_pair.clone(), + ); + wsv_block.apply_without_execution(&block)?; + wsv_block.commit(); + Ok(block) }) .collect::, _>>()? }; @@ -59,15 +66,11 @@ impl WsvApplyBlocks { /// # Panics /// If wsv isn't one block ahead of finalized wsv. pub fn measure(Self { wsv, blocks }: &Self) -> Result<()> { - let mut finalized_wsv = wsv.clone(); - let mut wsv = finalized_wsv.clone(); - - assert_eq!(wsv.height(), 0); for (block, i) in blocks.iter().zip(1..) 
{ - finalized_wsv = wsv.clone(); - wsv.apply(block)?; - assert_eq!(wsv.height(), i); - assert_eq!(wsv.height(), finalized_wsv.height() + 1); + let mut wsv_block = wsv.block(false); + wsv_block.apply(block)?; + assert_eq!(wsv_block.height(), i); + wsv_block.commit(); } Ok(()) diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs index f4f412eb633..4ede7e39e7e 100644 --- a/core/benches/blocks/common.rs +++ b/core/benches/blocks/common.rs @@ -6,7 +6,7 @@ use iroha_core::{ query::store::LiveQueryStore, smartcontracts::{Execute, Registrable as _}, sumeragi::network_topology::Topology, - wsv::World, + wsv::{State, StateBlock, World}, }; use iroha_data_model::{ account::Account, @@ -21,7 +21,7 @@ use serde_json::json; /// Create block pub fn create_block( - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, instructions: Vec, account_id: AccountId, key_pair: KeyPair, @@ -167,7 +167,7 @@ pub fn restore_every_nth( instructions } -pub fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> WorldStateView { +pub fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> State { let kura = iroha_core::kura::Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let mut domain = Domain::new(account_id.domain_id.clone()).build(account_id); @@ -175,20 +175,27 @@ pub fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> WorldStateView { account_id.clone(), Account::new(account_id.clone(), [key_pair.public_key().clone()]).build(account_id), ); - let mut wsv = WorldStateView::new(World::with([domain], UniqueVec::new()), kura, query_handle); - wsv.config.transaction_limits = TransactionLimits::new(u64::MAX, u64::MAX); - wsv.config.wasm_runtime_config.fuel_limit = u64::MAX; - wsv.config.wasm_runtime_config.max_memory = u32::MAX; + let wsv = State::new(World::with([domain], UniqueVec::new()), kura, query_handle); { + let mut wsv_block = wsv.block(false); + + wsv_block.config.transaction_limits = 
TransactionLimits::new(u64::MAX, u64::MAX); + wsv_block.config.wasm_runtime_config.fuel_limit = u64::MAX; + wsv_block.config.wasm_runtime_config.max_memory = u32::MAX; + + let mut wsv_transaction = wsv_block.transaction(); let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("../configs/peer/executor.wasm"); let wasm = std::fs::read(&path_to_executor) .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_executor.display())); let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); UpgradeExpr::new(executor) - .execute(account_id, &mut wsv) + .execute(account_id, &mut wsv_transaction) .expect("Failed to load executor"); + + wsv_transaction.apply(); + wsv_block.commit(); } wsv diff --git a/core/benches/blocks/validate_blocks.rs b/core/benches/blocks/validate_blocks.rs index 6a6d0bc585d..fd9d1c2e1f4 100644 --- a/core/benches/blocks/validate_blocks.rs +++ b/core/benches/blocks/validate_blocks.rs @@ -1,5 +1,5 @@ use eyre::Result; -use iroha_core::prelude::*; +use iroha_core::{prelude::*, wsv::State}; use iroha_data_model::{isi::InstructionExpr, prelude::*}; #[path = "./common.rs"] @@ -7,16 +7,15 @@ mod common; use common::*; -#[derive(Clone)] pub struct WsvValidateBlocks { - wsv: WorldStateView, + wsv: State, instructions: Vec>, key_pair: KeyPair, account_id: AccountId, } impl WsvValidateBlocks { - /// Create [`WorldStateView`] and blocks for benchmarking + /// Create [`State`] and blocks for benchmarking /// /// # Errors /// - Failed to parse [`AccountId`] @@ -63,16 +62,17 @@ impl WsvValidateBlocks { account_id, }: Self, ) -> Result<()> { - let mut finalized_wsv = wsv; - let mut wsv = finalized_wsv.clone(); - - assert_eq!(wsv.height(), 0); for (instructions, i) in instructions.into_iter().zip(1..) 
{ - finalized_wsv = wsv.clone(); - let block = create_block(&mut wsv, instructions, account_id.clone(), key_pair.clone()); - wsv.apply_without_execution(&block)?; - assert_eq!(wsv.height(), i); - assert_eq!(wsv.height(), finalized_wsv.height() + 1); + let mut wsv_block = wsv.block(false); + let block = create_block( + &mut wsv_block, + instructions, + account_id.clone(), + key_pair.clone(), + ); + wsv_block.apply_without_execution(&block)?; + assert_eq!(wsv_block.height(), i); + wsv_block.commit(); } Ok(()) diff --git a/core/benches/kura.rs b/core/benches/kura.rs index c0371201191..8ba47f8b6c9 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -10,7 +10,7 @@ use iroha_core::{ prelude::*, query::store::LiveQueryStore, sumeragi::network_topology::Topology, - wsv::World, + wsv::{State, World}, }; use iroha_crypto::KeyPair; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; @@ -44,12 +44,15 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let _thread_handle = iroha_core::kura::Kura::start(kura.clone()); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(World::new(), kura, query_handle); + let wsv = State::new(World::new(), kura, query_handle); let topology = Topology::new(UniqueVec::new()); - let mut block = BlockBuilder::new(vec![tx], topology, Vec::new()) - .chain(0, &mut wsv) - .sign(KeyPair::generate().unwrap()) - .unwrap(); + let mut block = { + let mut wsv_block = wsv.block(false); + BlockBuilder::new(vec![tx], topology, Vec::new()) + .chain(0, &mut wsv_block) + .sign(KeyPair::generate().unwrap()) + .unwrap() + }; for _ in 1..n_executors { block = block diff --git a/core/benches/validation.rs b/core/benches/validation.rs index 0a474ab3ea0..0e8a46cfb7a 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -10,7 +10,7 @@ use iroha_core::{ smartcontracts::{isi::Registrable as _, Execute}, sumeragi::network_topology::Topology, tx::TransactionExecutor, - 
wsv::World, + wsv::{State, World}, }; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; use iroha_primitives::unique_vec::UniqueVec; @@ -54,12 +54,12 @@ fn build_test_transaction(keys: KeyPair) -> SignedTransaction { .expect("Failed to sign.") } -fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { +fn build_test_and_transient_wsv(keys: KeyPair) -> State { let kura = iroha_core::kura::Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let (public_key, _) = keys.into(); - let mut wsv = WorldStateView::new( + let wsv = State::new( { let domain_id = DomainId::from_str(START_DOMAIN).expect("Valid"); let account_id = AccountId::new( @@ -76,6 +76,8 @@ fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { ); { + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("../configs/peer/executor.wasm"); let wasm = std::fs::read(&path_to_executor) @@ -83,8 +85,10 @@ fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); let authority = "genesis@genesis".parse().expect("Valid"); UpgradeExpr::new(executor) - .execute(&authority, &mut wsv) + .execute(&authority, &mut wsv_transaction) .expect("Failed to load executor"); + wsv_transaction.apply(); + wsv_block.commit(); } wsv @@ -132,7 +136,7 @@ fn validate_transaction(criterion: &mut Criterion) { let _ = criterion.bench_function("validate", move |b| { let transaction_executor = TransactionExecutor::new(TRANSACTION_LIMITS); b.iter(|| { - let mut wsv = wsv.clone(); + let mut wsv = wsv.block(false); match transaction_executor.validate(transaction.clone(), &mut wsv) { Ok(_) => success_count += 1, Err(_) => failure_count += 1, @@ -150,13 +154,14 @@ fn sign_blocks(criterion: &mut Criterion) { let key_pair = KeyPair::generate().expect("Failed to generate KeyPair."); 
let kura = iroha_core::kura::Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(World::new(), kura, query_handle); + let wsv = State::new(World::new(), kura, query_handle); let topology = Topology::new(UniqueVec::new()); let mut success_count = 0; let mut failures_count = 0; - let block = BlockBuilder::new(vec![transaction], topology, Vec::new()).chain(0, &mut wsv); + let mut wsv_block = wsv.block(false); + let block = BlockBuilder::new(vec![transaction], topology, Vec::new()).chain(0, &mut wsv_block); let _ = criterion.bench_function("sign_block", |b| { b.iter(|| match block.clone().sign(key_pair.clone()) { diff --git a/core/src/block.rs b/core/src/block.rs index 9322d16400d..e17b0091307 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -96,6 +96,7 @@ mod pending { use iroha_data_model::transaction::TransactionValue; use super::*; + use crate::wsv::StateBlock; /// First stage in the life-cycle of a [`Block`]. /// In the beginning the block is assumed to be verified and to contain only accepted transactions. 
@@ -158,7 +159,7 @@ mod pending { fn categorize_transactions( transactions: Vec, - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, ) -> Vec { transactions .into_iter() @@ -188,7 +189,7 @@ mod pending { pub fn chain( self, view_change_index: u64, - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, ) -> BlockBuilder { let transactions = Self::categorize_transactions(self.0.transactions, wsv); @@ -236,7 +237,7 @@ mod chained { mod valid { use super::*; - use crate::sumeragi::network_topology::Role; + use crate::{sumeragi::network_topology::Role, wsv::StateBlock}; /// Block that was validated and accepted #[derive(Debug, Clone)] @@ -265,7 +266,7 @@ mod valid { pub fn validate( block: SignedBlock, topology: &Topology, - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, ) -> Result { let actual_commit_topology = &block.payload().commit_topology; let expected_commit_topology = &topology.ordered_peers; @@ -341,7 +342,7 @@ mod valid { fn validate_transactions( block: &SignedBlock, - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, ) -> Result<(), TransactionValidationError> { let is_genesis = block.payload().header.is_genesis(); @@ -703,7 +704,9 @@ mod tests { use iroha_data_model::prelude::*; use super::*; - use crate::{kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _}; + use crate::{ + kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, wsv::State, + }; #[test] pub fn committed_and_valid_block_hashes_are_equal() { @@ -730,7 +733,8 @@ mod tests { let world = World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, kura, query_handle); + let wsv = State::new(world, kura, query_handle); + let mut wsv = wsv.block(false); // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); @@ -773,7 +777,8 @@ mod tests { let world = 
World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, kura, query_handle); + let wsv = State::new(world, kura, query_handle); + let mut wsv = wsv.block(false); // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); @@ -844,7 +849,8 @@ mod tests { let world = World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, kura, query_handle); + let wsv = State::new(world, kura, query_handle); + let mut wsv = wsv.block(false); let transaction_limits = &wsv.transaction_executor().transaction_limits; let domain_id = DomainId::from_str("domain").expect("Valid"); diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs index 1f8d8fe1fcb..6e2c4f1a90f 100644 --- a/core/src/block_sync.rs +++ b/core/src/block_sync.rs @@ -10,7 +10,7 @@ use iroha_p2p::Post; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; -use crate::{kura::Kura, sumeragi::SumeragiHandle, IrohaNetwork, NetworkMessage}; +use crate::{kura::Kura, sumeragi::SumeragiHandle, wsv::State, IrohaNetwork, NetworkMessage}; /// [`BlockSynchronizer`] actor handle. #[derive(Clone)] @@ -38,8 +38,7 @@ pub struct BlockSynchronizer { gossip_period: Duration, block_batch_size: u32, network: IrohaNetwork, - latest_hash: Option>, - previous_hash: Option>, + wsv: Arc, } impl BlockSynchronizer { @@ -56,13 +55,6 @@ impl BlockSynchronizer { loop { tokio::select! 
{ _ = gossip_period.tick() => self.request_block().await, - _ = self.sumeragi.wsv_updated() => { - let (latest_hash, previous_hash) = self - .sumeragi - .apply_wsv(|wsv| (wsv.latest_block_hash(), wsv.previous_block_hash())); - self.latest_hash = latest_hash; - self.previous_hash = previous_hash; - } msg = message_receiver.recv() => { let Some(msg) = msg else { info!("All handler to BlockSynchronizer are dropped. Shutting down..."); @@ -93,9 +85,10 @@ impl BlockSynchronizer { /// Sends request for latest blocks to a chosen peer async fn request_latest_blocks_from_peer(&mut self, peer_id: PeerId) { + let (previous_hash, latest_hash) = self.get_hashes(); message::Message::GetBlocksAfter(message::GetBlocksAfter::new( - self.latest_hash, - self.previous_hash, + latest_hash, + previous_hash, self.peer_id.clone(), )) .send_to(&self.network, peer_id) @@ -109,9 +102,8 @@ impl BlockSynchronizer { kura: Arc, peer_id: PeerId, network: IrohaNetwork, + wsv: Arc, ) -> Self { - let (latest_hash, previous_hash) = - sumeragi.apply_wsv(|wsv| (wsv.latest_block_hash(), wsv.previous_block_hash())); Self { peer_id, sumeragi, @@ -119,10 +111,14 @@ impl BlockSynchronizer { gossip_period: Duration::from_millis(config.gossip_period_ms), block_batch_size: config.block_batch_size, network, - latest_hash, - previous_hash, + wsv, } } + + fn get_hashes(&self) -> (Option>, Option>) { + let view = self.wsv.view(); + (view.previous_block_hash(), view.latest_block_hash()) + } } pub mod message { @@ -195,7 +191,9 @@ pub mod message { warn!("Error: not sending any blocks as batch_size is equal to zero."); return; } - let local_latest_block_hash = block_sync.latest_hash; + + let (_, local_latest_block_hash) = block_sync.get_hashes(); + if *latest_hash == local_latest_block_hash || *previous_hash == local_latest_block_hash { diff --git a/core/src/executor.rs b/core/src/executor.rs index 971f7dd7867..a49b62f38d6 100644 --- a/core/src/executor.rs +++ b/core/src/executor.rs @@ -17,7 +17,7 @@ use serde::{ 
use crate::{ smartcontracts::{wasm, Execute as _}, - wsv::{WasmSeed, WorldStateView}, + wsv::{deserialize::WasmSeed, StateSnapshot, StateTransaction}, }; impl From for ValidationFail { @@ -136,7 +136,7 @@ impl Executor { /// - Executor denied the operation. pub fn validate_transaction( &self, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, authority: &AccountId, transaction: SignedTransaction, ) -> Result<(), ValidationFail> { @@ -179,7 +179,7 @@ impl Executor { /// - Executor denied the operation. pub fn validate_instruction( &self, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, authority: &AccountId, instruction: InstructionExpr, ) -> Result<(), ValidationFail> { @@ -213,7 +213,7 @@ impl Executor { /// - Executor denied the operation. pub fn validate_query( &self, - wsv: &WorldStateView, + wsv: &StateSnapshot<'_>, authority: &AccountId, query: QueryBox, ) -> Result<(), ValidationFail> { @@ -250,12 +250,12 @@ impl Executor { pub fn migrate( &mut self, raw_executor: data_model_executor::Executor, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, authority: &AccountId, ) -> Result<(), MigrationError> { trace!("Running executor migration"); - let loaded_executor = LoadedExecutor::load(&wsv.engine, raw_executor)?; + let loaded_executor = LoadedExecutor::load(wsv.engine, raw_executor)?; let runtime = wasm::RuntimeBuilder::::new() .with_engine(wsv.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs index 7f1ea21b690..241df0bbcd3 100644 --- a/core/src/gossiper.rs +++ b/core/src/gossiper.rs @@ -8,10 +8,7 @@ use iroha_p2p::Broadcast; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; -use crate::{ - queue::Queue, sumeragi::SumeragiHandle, tx::AcceptedTransaction, wsv::WorldStateView, - IrohaNetwork, NetworkMessage, -}; +use crate::{queue::Queue, tx::AcceptedTransaction, wsv::State, IrohaNetwork, NetworkMessage}; /// [`Gossiper`] 
actor handle. #[derive(Clone)] @@ -41,10 +38,8 @@ pub struct TransactionGossiper { queue: Arc, /// [`iroha_p2p::Network`] actor handle network: IrohaNetwork, - /// Sumearagi - sumeragi: SumeragiHandle, - /// Local clone of [`WorldStateView`] - wsv: WorldStateView, + /// [`WorldState`] + wsv: Arc, } impl TransactionGossiper { @@ -61,12 +56,10 @@ impl TransactionGossiper { configuartion: &Configuration, network: IrohaNetwork, queue: Arc, - sumeragi: SumeragiHandle, + wsv: Arc, ) -> Self { - let wsv = sumeragi.wsv_clone(); Self { queue, - sumeragi, network, gossip_batch_size: configuartion.gossip_batch_size, gossip_period: Duration::from_millis(configuartion.gossip_period_ms), @@ -74,14 +67,11 @@ impl TransactionGossiper { } } - async fn run(mut self, mut message_receiver: mpsc::Receiver) { + async fn run(self, mut message_receiver: mpsc::Receiver) { let mut gossip_period = tokio::time::interval(self.gossip_period); loop { tokio::select! { _ = gossip_period.tick() => self.gossip_transactions(), - _ = self.sumeragi.wsv_updated() => { - self.wsv = self.sumeragi.wsv_clone(); - } transaction_gossip = message_receiver.recv() => { let Some(transaction_gossip) = transaction_gossip else { iroha_logger::info!("All handler to Gossiper are dropped. 
Shutting down..."); @@ -97,7 +87,7 @@ impl TransactionGossiper { fn gossip_transactions(&self) { let txs = self .queue - .n_random_transactions(self.gossip_batch_size, &self.wsv); + .n_random_transactions(self.gossip_batch_size, &self.wsv.view()); if txs.is_empty() { iroha_logger::debug!("Nothing to gossip"); @@ -113,11 +103,12 @@ impl TransactionGossiper { fn handle_transaction_gossip(&self, TransactionGossip { txs }: TransactionGossip) { iroha_logger::trace!(size = txs.len(), "Received new transaction gossip"); + let wsv = self.wsv.view(); for tx in txs { - let transaction_limits = &self.wsv.config.transaction_limits; + let transaction_limits = &wsv.config.transaction_limits; match AcceptedTransaction::accept(tx, transaction_limits) { - Ok(tx) => match self.queue.push(tx, &self.wsv) { + Ok(tx) => match self.queue.push(tx, &wsv) { Ok(_) => {} Err(crate::queue::Failure { tx, diff --git a/core/src/lib.rs b/core/src/lib.rs index e0c6109e31f..8c6aba0ce93 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -171,7 +171,7 @@ pub mod prelude { pub use crate::{ smartcontracts::ValidQuery, tx::AcceptedTransaction, - wsv::{World, WorldStateView}, + wsv::{StateView, World}, }; } diff --git a/core/src/queue.rs b/core/src/queue.rs index b9b36f793e7..559050b99a6 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -17,7 +17,7 @@ use crate::prelude::*; impl AcceptedTransaction { // TODO: We should have another type of transaction like `CheckedTransaction` in the type system? 
- fn check_signature_condition(&self, wsv: &WorldStateView) -> Result> { + fn check_signature_condition(&self, wsv: &StateView<'_>) -> Result> { let authority = &self.payload().authority; let transaction_signatories = self @@ -27,7 +27,7 @@ impl AcceptedTransaction { .cloned() .collect(); - wsv.map_account(authority, |account| { + wsv.world.map_account(authority, |account| { Ok(account .signature_check_condition .check(&account.signatories, &transaction_signatories)) @@ -35,7 +35,7 @@ impl AcceptedTransaction { } /// Check if [`self`] is committed or rejected. - fn is_in_blockchain(&self, wsv: &WorldStateView) -> bool { + fn is_in_blockchain(&self, wsv: &StateView<'_>) -> bool { wsv.has_transaction(self.hash()) } } @@ -109,7 +109,7 @@ impl Queue { } } - fn is_pending(&self, tx: &AcceptedTransaction, wsv: &WorldStateView) -> bool { + fn is_pending(&self, tx: &AcceptedTransaction, wsv: &StateView) -> bool { !self.is_expired(tx) && !tx.is_in_blockchain(wsv) } @@ -138,7 +138,7 @@ impl Queue { /// Returns all pending transactions. pub fn all_transactions<'wsv>( &'wsv self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateView, ) -> impl Iterator + 'wsv { self.txs.iter().filter_map(|tx| { if self.is_pending(tx.value(), wsv) { @@ -150,7 +150,7 @@ impl Queue { } /// Returns `n` randomly selected transaction from the queue. 
- pub fn n_random_transactions(&self, n: u32, wsv: &WorldStateView) -> Vec { + pub fn n_random_transactions(&self, n: u32, wsv: &StateView) -> Vec { self.txs .iter() .filter(|e| self.is_pending(e.value(), wsv)) @@ -161,11 +161,7 @@ impl Queue { ) } - fn check_tx( - &self, - tx: &AcceptedTransaction, - wsv: &WorldStateView, - ) -> Result, Error> { + fn check_tx(&self, tx: &AcceptedTransaction, wsv: &StateView) -> Result, Error> { if self.is_in_future(tx) { Err(Error::InFuture) } else if self.is_expired(tx) { @@ -186,7 +182,7 @@ impl Queue { /// # Errors /// See [`enum@Error`] #[allow(clippy::missing_panics_doc)] // NOTE: It's a system invariant, should never happen - pub fn push(&self, tx: AcceptedTransaction, wsv: &WorldStateView) -> Result<(), Failure> { + pub fn push(&self, tx: AcceptedTransaction, wsv: &StateView) -> Result<(), Failure> { trace!(?tx, "Pushing to the queue"); if let Err(err) = self.check_tx(&tx, wsv) { warn!("Failed to evaluate signature check. Error = {}", err); @@ -242,7 +238,7 @@ impl Queue { fn pop_from_queue( &self, seen: &mut Vec>, - wsv: &WorldStateView, + wsv: &StateView, expired_transactions: &mut Vec, ) -> Option { loop { @@ -293,7 +289,7 @@ impl Queue { #[cfg(test)] fn collect_transactions_for_block( &self, - wsv: &WorldStateView, + wsv: &StateView, max_txs_in_block: usize, ) -> Vec { let mut transactions = Vec::with_capacity(max_txs_in_block); @@ -306,7 +302,7 @@ impl Queue { /// BEWARE: Shouldn't be called in parallel with itself. 
pub fn get_transactions_for_block( &self, - wsv: &WorldStateView, + wsv: &StateView, max_txs_in_block: usize, transactions: &mut Vec, expired_transactions: &mut Vec, @@ -385,8 +381,11 @@ mod tests { use super::*; use crate::{ - kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, - wsv::World, PeersIds, + kura::Kura, + query::store::LiveQueryStore, + smartcontracts::isi::Registrable as _, + wsv::{State, World}, + PeersIds, }; fn accepted_tx(account_id: &str, key: KeyPair) -> AcceptedTransaction { @@ -421,11 +420,12 @@ mod tests { let key_pair = KeyPair::generate().unwrap(); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([key_pair.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -447,11 +447,12 @@ mod tests { let key_pair = KeyPair::generate().unwrap(); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([key_pair.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -494,12 +495,13 @@ mod tests { account.signature_check_condition = SignatureCheckCondition::all_account_signatures(); assert!(domain.add_account(account).is_none()); let query_handle = LiveQueryStore::test().start(); - Arc::new(WorldStateView::new( + Arc::new(State::new( World::with([domain], PeersIds::new()), kura, query_handle, )) }; + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -570,11 +572,12 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura 
= Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, max_transactions_in_queue: 100, @@ -598,13 +601,16 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new( + let wsv = State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, ); let tx = accepted_tx("alice@wonderland", alice_key); - wsv.transactions.insert(tx.hash(), 1); + let mut block = wsv.block(false); + block.transactions.insert(tx.hash(), 1); + block.commit(); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, max_transactions_in_queue: 100, @@ -628,7 +634,7 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new( + let wsv = State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, @@ -641,11 +647,13 @@ mod tests { .build() .expect("Default queue config should always build") }); - queue.push(tx.clone(), &wsv).unwrap(); - wsv.transactions.insert(tx.hash(), 1); + queue.push(tx.clone(), &wsv.view()).unwrap(); + let mut block = wsv.block(false); + block.transactions.insert(tx.hash(), 1); + block.commit(); assert_eq!( queue - .collect_transactions_for_block(&wsv, max_txs_in_block) + .collect_transactions_for_block(&wsv.view(), max_txs_in_block) .len(), 0 ); @@ -658,11 +666,12 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate 
keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 200, max_transactions_in_queue: 100, @@ -708,11 +717,12 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, max_transactions_in_queue: 100, @@ -744,11 +754,12 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, max_transactions_in_queue: 100, @@ -787,11 +798,11 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, - ); + )); let queue = Arc::new(Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -812,7 +823,7 @@ mod tests { thread::spawn(move || { while start_time.elapsed() < run_for { let tx = 
accepted_tx("alice@wonderland", alice_key.clone()); - match queue_arc_clone.push(tx, &wsv_clone) { + match queue_arc_clone.push(tx, &wsv_clone.view()) { Ok(()) | Err(Failure { err: Error::Full | Error::MaximumTransactionsPerUser, @@ -827,14 +838,16 @@ mod tests { // Spawn a thread where we get_transactions_for_block and add them to WSV let get_txs_handle = { let queue_arc_clone = Arc::clone(&queue); - let mut wsv_clone = wsv; + let wsv_clone = wsv; thread::spawn(move || { while start_time.elapsed() < run_for { - for tx in - queue_arc_clone.collect_transactions_for_block(&wsv_clone, max_txs_in_block) + for tx in queue_arc_clone + .collect_transactions_for_block(&wsv_clone.view(), max_txs_in_block) { - wsv_clone.transactions.insert(tx.hash(), 1); + let mut block = wsv_clone.block(false); + block.transactions.insert(tx.hash(), 1); + block.commit(); } // Simulate random small delays thread::sleep(Duration::from_millis(rand::thread_rng().gen_range(0..25))); @@ -861,11 +874,12 @@ mod tests { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = Arc::new(WorldStateView::new( + let wsv = Arc::new(State::new( world_with_test_domains([alice_key.public_key().clone()]), kura, query_handle, )); + let wsv = wsv.view(); let queue = Queue::from_configuration(&Configuration { future_threshold_ms, @@ -911,7 +925,7 @@ mod tests { World::with([domain], PeersIds::new()) }; let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, kura, query_handle); + let wsv = State::new(world, kura, query_handle); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -926,14 +940,14 @@ mod tests { queue .push( accepted_tx("alice@wonderland", alice_key_pair.clone()), - &wsv, + &wsv.view(), ) .expect("Failed to push tx into queue"); // Second push by Alice excide limit and will be rejected let result 
= queue.push( accepted_tx("alice@wonderland", alice_key_pair.clone()), - &wsv, + &wsv.view(), ); assert!( matches!( @@ -948,26 +962,31 @@ mod tests { // First push by Bob should be fine despite previous Alice error queue - .push(accepted_tx("bob@wonderland", bob_key_pair.clone()), &wsv) + .push( + accepted_tx("bob@wonderland", bob_key_pair.clone()), + &wsv.view(), + ) .expect("Failed to push tx into queue"); - let transactions = queue.collect_transactions_for_block(&wsv, 10); + let transactions = queue.collect_transactions_for_block(&wsv.view(), 10); assert_eq!(transactions.len(), 2); + let mut block = wsv.block(false); for transaction in transactions { // Put transaction hashes into wsv as if they were in the blockchain - wsv.transactions.insert(transaction.hash(), 1); + block.transactions.insert(transaction.hash(), 1); } + block.commit(); // Cleanup transactions - let transactions = queue.collect_transactions_for_block(&wsv, 10); + let transactions = queue.collect_transactions_for_block(&wsv.view(), 10); assert!(transactions.is_empty()); // After cleanup Alice and Bob pushes should work fine queue - .push(accepted_tx("alice@wonderland", alice_key_pair), &wsv) + .push(accepted_tx("alice@wonderland", alice_key_pair), &wsv.view()) .expect("Failed to push tx into queue"); queue - .push(accepted_tx("bob@wonderland", bob_key_pair), &wsv) + .push(accepted_tx("bob@wonderland", bob_key_pair), &wsv.view()) .expect("Failed to push tx into queue"); } } diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs index 508772dbd6b..553478238b9 100644 --- a/core/src/smartcontracts/isi/account.rs +++ b/core/src/smartcontracts/isi/account.rs @@ -1,11 +1,11 @@ //! This module contains implementations of smart-contract traits and instructions for [`Account`] structure -//! and implementations of [`Query`]'s to [`WorldStateView`] about [`Account`]. +//! and implementations of [`Query`]'s about [`Account`]. 
use iroha_data_model::{asset::AssetsMap, prelude::*, query::error::FindError}; use iroha_telemetry::metrics; use super::prelude::*; -use crate::{ValidQuery, WorldStateView}; +use crate::ValidQuery; impl Registrable for iroha_data_model::account::NewAccount { type Target = Account; @@ -39,44 +39,50 @@ pub mod isi { }; use super::*; - use crate::role::{AsRoleIdWithOwnerRef, RoleIdWithOwner, RoleIdWithOwnerRef}; + use crate::role::RoleIdWithOwner; impl Execute for Register { #[metrics(+"register_asset")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_id = self.object.id; - match wsv.asset(&asset_id) { + match wsv.world.asset(&asset_id) { Err(err) => match err { QueryExecutionFail::Find(find_err) if matches!(find_err, FindError::Asset(_)) => { assert_can_register(&asset_id.definition_id, wsv, &self.object.value)?; let asset = wsv + .world .asset_or_insert(asset_id.clone(), self.object.value) .expect("Account exists"); match asset.value { AssetValue::Quantity(increment) => { - wsv.increase_asset_total_amount( + wsv.world.increase_asset_total_amount( &asset_id.definition_id, increment, )?; } AssetValue::BigQuantity(increment) => { - wsv.increase_asset_total_amount( + wsv.world.increase_asset_total_amount( &asset_id.definition_id, increment, )?; } AssetValue::Fixed(increment) => { - wsv.increase_asset_total_amount( + wsv.world.increase_asset_total_amount( &asset_id.definition_id, increment, )?; } AssetValue::Store(_) => { - wsv.increase_asset_total_amount(&asset_id.definition_id, 1_u32)?; + wsv.world + .increase_asset_total_amount(&asset_id.definition_id, 1_u32)?; } } Ok(()) @@ -94,11 +100,15 @@ pub mod isi { impl Execute for Unregister { #[metrics(+"unregister_asset")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, 
+ wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_id = self.object_id; let account_id = asset_id.account_id.clone(); - let asset = wsv.account_mut(&account_id).and_then(|account| { + let asset = wsv.world.account_mut(&account_id).and_then(|account| { account .remove_asset(&asset_id) .ok_or_else(|| FindError::Asset(asset_id)) @@ -106,25 +116,30 @@ pub mod isi { match asset.value { AssetValue::Quantity(increment) => { - wsv.decrease_asset_total_amount(&asset.id.definition_id, increment)?; + wsv.world + .decrease_asset_total_amount(&asset.id.definition_id, increment)?; } AssetValue::BigQuantity(increment) => { - wsv.decrease_asset_total_amount(&asset.id.definition_id, increment)?; + wsv.world + .decrease_asset_total_amount(&asset.id.definition_id, increment)?; } AssetValue::Fixed(increment) => { - wsv.decrease_asset_total_amount(&asset.id.definition_id, increment)?; + wsv.world + .decrease_asset_total_amount(&asset.id.definition_id, increment)?; } AssetValue::Store(_) => { - wsv.decrease_asset_total_amount(&asset.id.definition_id, 1_u32)?; + wsv.world + .decrease_asset_total_amount(&asset.id.definition_id, 1_u32)?; } } - wsv.emit_events(Some(AccountEvent::Asset(AssetEvent::Removed( - AssetChanged { - asset_id: asset.id, - amount: asset.value, - }, - )))); + wsv.world + .emit_events(Some(AccountEvent::Asset(AssetEvent::Removed( + AssetChanged { + asset_id: asset.id, + amount: asset.value, + }, + )))); Ok(()) } @@ -132,11 +147,16 @@ pub mod isi { impl Execute for Mint { #[metrics(+"mint_account_public_key")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let public_key = self.object; - wsv.account_mut(&account_id) + wsv.world + .account_mut(&account_id) .map_err(Error::from) .and_then(|account| { if account.signatories.contains(&public_key) { @@ -151,7 
+171,8 @@ pub mod isi { Ok(()) })?; - wsv.emit_events(Some(AccountEvent::AuthenticationAdded(account_id.clone()))); + wsv.world + .emit_events(Some(AccountEvent::AuthenticationAdded(account_id.clone()))); Ok(()) } @@ -159,11 +180,15 @@ pub mod isi { impl Execute for Burn { #[metrics(+"burn_account_public_key")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let public_key = self.object; - wsv.account_mut(&account_id) + wsv.world.account_mut(&account_id) .map_err(Error::from) .and_then(|account| { if account.signatories.len() < 2 { @@ -178,7 +203,8 @@ pub mod isi { Ok(()) })?; - wsv.emit_events(Some(AccountEvent::AuthenticationRemoved(account_id))); + wsv.world + .emit_events(Some(AccountEvent::AuthenticationRemoved(account_id))); Ok(()) } @@ -186,28 +212,40 @@ pub mod isi { impl Execute for Mint { #[metrics(+"mint_account_signature_check_condition")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let signature_check_condition = self.object; - wsv.account_mut(&account_id)?.signature_check_condition = signature_check_condition; + wsv.world + .account_mut(&account_id)? 
+ .signature_check_condition = signature_check_condition; - wsv.emit_events(Some(AccountEvent::AuthenticationAdded(account_id.clone()))); + wsv.world + .emit_events(Some(AccountEvent::AuthenticationAdded(account_id.clone()))); Ok(()) } } impl Execute for Transfer { - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - wsv.asset_definition_mut(&self.object)?.owned_by = self.destination_id.clone(); - - wsv.emit_events(Some(AssetDefinitionEvent::OwnerChanged( - AssetDefinitionOwnerChanged { - asset_definition_id: self.object, - new_owner: self.destination_id, - }, - ))); + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { + wsv.world.asset_definition_mut(&self.object)?.owned_by = self.destination_id.clone(); + + wsv.world + .emit_events(Some(AssetDefinitionEvent::OwnerChanged( + AssetDefinitionOwnerChanged { + asset_definition_id: self.object, + new_owner: self.destination_id, + }, + ))); Ok(()) } @@ -215,12 +253,17 @@ pub mod isi { impl Execute for SetKeyValue { #[metrics(+"set_account_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.object_id; let account_metadata_limits = wsv.config.account_metadata_limits; - wsv.account_mut(&account_id) + wsv.world + .account_mut(&account_id) .map_err(Error::from) .and_then(|account| { account @@ -233,11 +276,12 @@ pub mod isi { .map_err(Error::from) })?; - wsv.emit_events(Some(AccountEvent::MetadataInserted(MetadataChanged { - target_id: account_id.clone(), - key: self.key.clone(), - value: Box::new(self.value), - }))); + wsv.world + .emit_events(Some(AccountEvent::MetadataInserted(MetadataChanged { + target_id: account_id.clone(), + key: self.key.clone(), + value: Box::new(self.value), + }))); Ok(()) } @@ -245,21 +289,26 @@ pub mod isi { impl 
Execute for RemoveKeyValue { #[metrics(+"remove_account_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.object_id; - let value = wsv.account_mut(&account_id).and_then(|account| { + let value = wsv.world.account_mut(&account_id).and_then(|account| { account .metadata .remove(&self.key) .ok_or_else(|| FindError::MetadataKey(self.key.clone())) })?; - wsv.emit_events(Some(AccountEvent::MetadataRemoved(MetadataChanged { - target_id: account_id.clone(), - key: self.key, - value: Box::new(value), - }))); + wsv.world + .emit_events(Some(AccountEvent::MetadataRemoved(MetadataChanged { + target_id: account_id.clone(), + key: self.key, + value: Box::new(value), + }))); Ok(()) } @@ -267,23 +316,31 @@ pub mod isi { impl Execute for Grant { #[metrics(+"grant_account_permission")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let permission = self.object; let permission_id = permission.definition_id.clone(); // Check if account exists - wsv.account_mut(&account_id)?; + wsv.world.account_mut(&account_id)?; if !wsv - .permission_token_schema() + .world + .permission_token_schema .token_ids .contains(&permission_id) { return Err(FindError::PermissionToken(permission_id).into()); } - if wsv.account_contains_inherent_permission(&account_id, &permission) { + if wsv + .world + .account_contains_inherent_permission(&account_id, &permission) + { return Err(RepetitionError { instruction_type: InstructionType::Grant, id: permission.definition_id.into(), @@ -291,9 +348,9 @@ pub mod isi { .into()); } - wsv.add_account_permission(&account_id, permission); + wsv.world.add_account_permission(&account_id, 
permission); - wsv.emit_events(Some(AccountEvent::PermissionAdded( + wsv.world.emit_events(Some(AccountEvent::PermissionAdded( AccountPermissionChanged { account_id, permission_id, @@ -306,18 +363,25 @@ pub mod isi { impl Execute for Revoke { #[metrics(+"revoke_account_permission")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let permission = self.object; // Check if account exists - wsv.account(&account_id)?; + wsv.world.account(&account_id)?; - if !wsv.remove_account_permission(&account_id, &permission) { + if !wsv + .world + .remove_account_permission(&account_id, &permission) + { return Err(FindError::PermissionToken(permission.definition_id).into()); } - wsv.emit_events(Some(AccountEvent::PermissionRemoved( + wsv.world.emit_events(Some(AccountEvent::PermissionRemoved( AccountPermissionChanged { account_id, permission_id: permission.definition_id, @@ -330,12 +394,16 @@ pub mod isi { impl Execute for Grant { #[metrics(+"grant_account_role")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let role_id = self.object; let permissions = wsv - .world() + .world .roles .get(&role_id) .ok_or_else(|| FindError::Role(role_id.clone()))? 
@@ -344,12 +412,16 @@ pub mod isi { .into_iter() .map(|token| token.definition_id); - wsv.account(&account_id)?; + wsv.world.account(&account_id)?; - if !wsv + if wsv .world .account_roles - .insert(RoleIdWithOwner::new(account_id.clone(), role_id.clone())) + .insert( + RoleIdWithOwner::new(account_id.clone(), role_id.clone()), + (), + ) + .is_some() { return Err(RepetitionError { instruction_type: InstructionType::Grant, @@ -358,7 +430,7 @@ pub mod isi { .into()); } - wsv.emit_events({ + wsv.world.emit_events({ let account_id_clone = account_id.clone(); permissions .zip(core::iter::repeat_with(move || account_id.clone())) @@ -381,12 +453,16 @@ pub mod isi { impl Execute for Revoke { #[metrics(+"revoke_account_role")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.destination_id; let role_id = self.object; let permissions = wsv - .world() + .world .roles .get(&role_id) .ok_or_else(|| FindError::Role(role_id.clone()))? @@ -395,15 +471,19 @@ pub mod isi { .into_iter() .map(|token| token.definition_id); - if !wsv + if wsv .world .account_roles - .remove::(&RoleIdWithOwnerRef::new(&account_id, &role_id)) + .remove(RoleIdWithOwner { + account_id: account_id.clone(), + role_id: role_id.clone(), + }) + .is_none() { return Err(FindError::Role(role_id).into()); } - wsv.emit_events({ + wsv.world.emit_events({ let account_id_clone = account_id.clone(); permissions .zip(core::iter::repeat_with(move || account_id.clone())) @@ -427,7 +507,7 @@ pub mod isi { /// Assert that this asset can be registered to an account. 
fn assert_can_register( definition_id: &AssetDefinitionId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, value: &AssetValue, ) -> Result<(), Error> { let definition = asset::isi::assert_asset_type(definition_id, wsv, value.value_type())?; @@ -436,11 +516,12 @@ pub mod isi { Mintable::Not => Err(Error::Mintability(MintabilityError::MintUnmintable)), Mintable::Once => { if !value.is_zero_value() { - let asset_definition = wsv.asset_definition_mut(definition_id)?; + let asset_definition = wsv.world.asset_definition_mut(definition_id)?; forbid_minting(asset_definition)?; - wsv.emit_events(Some(AssetDefinitionEvent::MintabilityChanged( - definition_id.clone(), - ))); + wsv.world + .emit_events(Some(AssetDefinitionEvent::MintabilityChanged( + definition_id.clone(), + ))); } Ok(()) } @@ -489,20 +570,20 @@ pub mod query { }; use super::*; + use crate::wsv::StateSnapshot; impl ValidQuery for FindRolesByAccountId { #[metrics(+"find_roles_by_account_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let account_id = wsv .evaluate(&self.id) .wrap_err("Failed to evaluate account id") .map_err(|e| Error::Evaluate(e.to_string()))?; - iroha_logger::trace!(%account_id, roles=?wsv.world.roles); - wsv.account(&account_id)?; - Ok(Box::new(wsv.account_roles(&account_id).cloned())) + wsv.world.account(&account_id)?; + Ok(Box::new(wsv.world.account_roles(&account_id).cloned())) } } @@ -510,15 +591,14 @@ pub mod query { #[metrics(+"find_permission_tokens_by_account_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let account_id = wsv .evaluate(&self.id) .wrap_err("Failed to evaluate account id") .map_err(|e| Error::Evaluate(e.to_string()))?; - iroha_logger::trace!(%account_id, accounts=?wsv.world.domains); Ok(Box::new( - wsv.account_permission_tokens(&account_id)?.cloned(), + 
wsv.world.account_permission_tokens(&account_id)?.cloned(), )) } } @@ -527,11 +607,11 @@ pub mod query { #[metrics(+"find_all_accounts")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(|domain| domain.accounts.values()) .cloned(), )) @@ -540,13 +620,13 @@ pub mod query { impl ValidQuery for FindAccountById { #[metrics(+"find_account_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to evaluate id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - wsv.map_account(&id, Clone::clone).map_err(Into::into) + wsv.world.map_account(&id, Clone::clone).map_err(Into::into) } } @@ -554,7 +634,7 @@ pub mod query { #[metrics(+"find_account_by_name")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let name = wsv .evaluate(&self.name) @@ -562,8 +642,8 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%name); Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(move |domain| { let name = name.clone(); @@ -581,7 +661,7 @@ pub mod query { #[metrics(+"find_accounts_by_domain_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let id = wsv .evaluate(&self.domain_id) @@ -589,13 +669,13 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - Ok(Box::new(wsv.domain(&id)?.accounts.values().cloned())) + Ok(Box::new(wsv.world.domain(&id)?.accounts.values().cloned())) } } impl ValidQuery for FindAccountKeyValueByIdAndKey { #[metrics(+"find_account_key_value_by_id_and_key")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: 
&StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to evaluate account id") @@ -605,7 +685,8 @@ pub mod query { .wrap_err("Failed to evaluate key") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id, %key); - wsv.map_account(&id, |account| account.metadata.get(&key).map(Clone::clone))? + wsv.world + .map_account(&id, |account| account.metadata.get(&key).map(Clone::clone))? .ok_or_else(|| FindError::MetadataKey(key).into()) .map(Into::into) } @@ -615,7 +696,7 @@ pub mod query { #[metrics(+"find_accounts_with_asset")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let asset_definition_id = wsv .evaluate(&self.asset_definition_id) @@ -624,14 +705,15 @@ pub mod query { iroha_logger::trace!(%asset_definition_id); Ok(Box::new( - wsv.map_domain(&asset_definition_id.domain_id.clone(), move |domain| { - domain.accounts.values().filter(move |account| { - let asset_id = - AssetId::new(asset_definition_id.clone(), account.id().clone()); - account.assets.get(&asset_id).is_some() - }) - })? - .cloned(), + wsv.world + .map_domain(&asset_definition_id.domain_id.clone(), move |domain| { + domain.accounts.values().filter(move |account| { + let asset_id = + AssetId::new(asset_definition_id.clone(), account.id().clone()); + account.assets.get(&asset_id).is_some() + }) + })? 
+ .cloned(), )) } } diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index 6d5fd0ccda3..a56ff701029 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ b/core/src/smartcontracts/isi/asset.rs @@ -40,18 +40,25 @@ pub mod isi { impl Execute for SetKeyValue { #[metrics(+"set_asset_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_id = self.object_id; assert_asset_type(&asset_id.definition_id, wsv, AssetValueType::Store)?; // Increase `Store` asset total quantity by 1 if asset was not present earlier - if matches!(wsv.asset(&asset_id), Err(QueryExecutionFail::Find(_))) { - wsv.increase_asset_total_amount(&asset_id.definition_id, 1_u32)?; + if matches!(wsv.world.asset(&asset_id), Err(QueryExecutionFail::Find(_))) { + wsv.world + .increase_asset_total_amount(&asset_id.definition_id, 1_u32)?; } let asset_metadata_limits = wsv.config.asset_metadata_limits; - let asset = wsv.asset_or_insert(asset_id.clone(), Metadata::new())?; + let asset = wsv + .world + .asset_or_insert(asset_id.clone(), Metadata::new())?; { let store: &mut Metadata = asset @@ -65,11 +72,12 @@ pub mod isi { )?; } - wsv.emit_events(Some(AssetEvent::MetadataInserted(MetadataChanged { - target_id: asset_id, - key: self.key, - value: Box::new(self.value), - }))); + wsv.world + .emit_events(Some(AssetEvent::MetadataInserted(MetadataChanged { + target_id: asset_id, + key: self.key, + value: Box::new(self.value), + }))); Ok(()) } @@ -77,13 +85,17 @@ pub mod isi { impl Execute for RemoveKeyValue { #[metrics(+"remove_asset_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_id = self.object_id; 
assert_asset_type(&asset_id.definition_id, wsv, AssetValueType::Store)?; let value = { - let asset = wsv.asset_mut(&asset_id)?; + let asset = wsv.world.asset_mut(&asset_id)?; let store: &mut Metadata = asset .try_as_mut() .map_err(eyre::Error::from) @@ -93,11 +105,12 @@ pub mod isi { .ok_or_else(|| FindError::MetadataKey(self.key.clone()))? }; - wsv.emit_events(Some(AssetEvent::MetadataRemoved(MetadataChanged { - target_id: asset_id, - key: self.key, - value: Box::new(value), - }))); + wsv.world + .emit_events(Some(AssetEvent::MetadataRemoved(MetadataChanged { + target_id: asset_id, + key: self.key, + value: Box::new(value), + }))); Ok(()) } @@ -112,7 +125,7 @@ pub mod isi { fn execute( self, authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { <$ty as InnerMint>::execute(self, authority, wsv) } @@ -129,7 +142,7 @@ pub mod isi { fn execute( self, authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { <$ty as InnerBurn>::execute(self, authority, wsv) } @@ -146,7 +159,7 @@ pub mod isi { fn execute( self, authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { <$ty as InnerTransfer>::execute(self, authority, wsv) } @@ -171,7 +184,7 @@ pub mod isi { fn execute( mint: Mint, _authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> where Self: AssetInstructionInfo + CheckedOp + IntoMetric + Copy, @@ -193,7 +206,7 @@ pub mod isi { wsv, ::EXPECTED_VALUE_TYPE, )?; - let asset = wsv.asset_or_insert( + let asset = wsv.world.asset_or_insert( asset_id.clone(), ::DEFAULT_ASSET_VALUE, )?; @@ -211,10 +224,11 @@ pub mod isi { #[allow(clippy::float_arithmetic)] { wsv.new_tx_amounts.lock().push(new_quantity.into_metric()); - wsv.increase_asset_total_amount(&asset_id.definition_id, mint.object)?; + wsv.world + 
.increase_asset_total_amount(&asset_id.definition_id, mint.object)?; } - wsv.emit_events(Some(AssetEvent::Added(AssetChanged { + wsv.world.emit_events(Some(AssetEvent::Added(AssetChanged { asset_id, amount: mint.object.into(), }))); @@ -228,7 +242,7 @@ pub mod isi { fn execute( burn: Burn, _authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> where Self: AssetInstructionInfo + CheckedOp + IntoMetric + Copy, @@ -246,7 +260,7 @@ pub mod isi { ::EXPECTED_VALUE_TYPE, )?; let burn_quantity = { - let account = wsv.account_mut(&asset_id.account_id)?; + let account = wsv.world.account_mut(&asset_id.account_id)?; let asset = account .assets .get_mut(&asset_id) @@ -267,13 +281,15 @@ pub mod isi { #[allow(clippy::float_arithmetic)] { wsv.new_tx_amounts.lock().push(burn_quantity.into_metric()); - wsv.decrease_asset_total_amount(&asset_id.definition_id, burn.object)?; + wsv.world + .decrease_asset_total_amount(&asset_id.definition_id, burn.object)?; } - wsv.emit_events(Some(AssetEvent::Removed(AssetChanged { - asset_id: asset_id.clone(), - amount: burn.object.into(), - }))); + wsv.world + .emit_events(Some(AssetEvent::Removed(AssetChanged { + asset_id: asset_id.clone(), + amount: burn.object.into(), + }))); Ok(()) } @@ -284,7 +300,7 @@ pub mod isi { fn execute( transfer: Transfer, _authority: &AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> where Self: AssetInstructionInfo + CheckedOp + IntoMetric + Copy, @@ -299,7 +315,7 @@ pub mod isi { ); { - let account = wsv.account_mut(&source_id.account_id)?; + let account = wsv.world.account_mut(&source_id.account_id)?; let asset = account .assets .get_mut(&source_id) @@ -316,7 +332,7 @@ pub mod isi { } } - let destination_asset = wsv.asset_or_insert( + let destination_asset = wsv.world.asset_or_insert( destination_id.clone(), ::DEFAULT_ASSET_VALUE, )?; @@ -338,7 +354,7 @@ pub mod isi { .push(transfer_quantity.into_metric()); } 
- wsv.emit_events([ + wsv.world.emit_events([ AssetEvent::Removed(AssetChanged { asset_id: source_id, amount: transfer.object.into(), @@ -377,10 +393,10 @@ pub mod isi { /// Asserts that asset definition with [`definition_id`] has asset type [`expected_value_type`]. pub(crate) fn assert_asset_type( definition_id: &AssetDefinitionId, - wsv: &WorldStateView, + wsv: &StateTransaction<'_, '_>, expected_value_type: AssetValueType, ) -> Result { - let asset_definition = wsv.asset_definition(definition_id)?; + let asset_definition = wsv.world.asset_definition(definition_id)?; if asset_definition.value_type == expected_value_type { Ok(asset_definition) } else { @@ -395,7 +411,7 @@ pub mod isi { /// Assert that this asset is `mintable`. fn assert_can_mint( definition_id: &AssetDefinitionId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, expected_value_type: AssetValueType, ) -> Result<(), Error> { let asset_definition = assert_asset_type(definition_id, wsv, expected_value_type)?; @@ -403,11 +419,12 @@ pub mod isi { Mintable::Infinitely => Ok(()), Mintable::Not => Err(Error::Mintability(MintabilityError::MintUnmintable)), Mintable::Once => { - let asset_definition = wsv.asset_definition_mut(definition_id)?; + let asset_definition = wsv.world.asset_definition_mut(definition_id)?; forbid_minting(asset_definition)?; - wsv.emit_events(Some(AssetDefinitionEvent::MintabilityChanged( - definition_id.clone(), - ))); + wsv.world + .emit_events(Some(AssetDefinitionEvent::MintabilityChanged( + definition_id.clone(), + ))); Ok(()) } } @@ -425,16 +442,17 @@ pub mod query { }; use super::*; + use crate::wsv::StateSnapshot; impl ValidQuery for FindAllAssets { #[metrics(+"find_all_assets")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(|domain| { domain .accounts @@ -450,11 +468,11 @@ pub mod query { 
#[metrics(+"find_all_asset_definitions")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(|domain| domain.asset_definitions.values()) .cloned(), )) @@ -463,14 +481,14 @@ pub mod query { impl ValidQuery for FindAssetById { #[metrics(+"find_asset_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - wsv.asset(&id).map_err(|asset_err| { - if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { + wsv.world.asset(&id).map_err(|asset_err| { + if let Err(definition_err) = wsv.world.asset_definition(&id.definition_id) { definition_err.into() } else { asset_err @@ -481,13 +499,13 @@ pub mod query { impl ValidQuery for FindAssetDefinitionById { #[metrics(+"find_asset_defintion_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset definition id") .map_err(|e| Error::Evaluate(e.to_string()))?; - let entry = wsv.asset_definition(&id).map_err(Error::from)?; + let entry = wsv.world.asset_definition(&id).map_err(Error::from)?; Ok(entry) } @@ -497,7 +515,7 @@ pub mod query { #[metrics(+"find_assets_by_name")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let name = wsv .evaluate(&self.name) @@ -505,8 +523,8 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%name); Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(move |domain| { let name = name.clone(); @@ -528,14 +546,14 @@ pub mod query { #[metrics(+"find_assets_by_account_id")] fn execute<'wsv>( &self, - 
wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let id = wsv .evaluate(&self.account_id) .wrap_err("Failed to get account id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - Ok(Box::new(wsv.account_assets(&id)?.cloned())) + Ok(Box::new(wsv.world.account_assets(&id)?.cloned())) } } @@ -543,7 +561,7 @@ pub mod query { #[metrics(+"find_assets_by_asset_definition_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let id = wsv .evaluate(&self.asset_definition_id) @@ -551,8 +569,8 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); Ok(Box::new( - wsv.domains() - .values() + wsv.world + .domains() .flat_map(move |domain| { let id = id.clone(); @@ -574,7 +592,7 @@ pub mod query { #[metrics(+"find_assets_by_domain_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let id = wsv .evaluate(&self.domain_id) @@ -582,7 +600,8 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); Ok(Box::new( - wsv.domain(&id)? + wsv.world + .domain(&id)? 
.accounts .values() .flat_map(|account| account.assets.values()) @@ -595,7 +614,7 @@ pub mod query { #[metrics(+"find_assets_by_domain_id_and_asset_definition_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { let domain_id = wsv .evaluate(&self.domain_id) @@ -605,7 +624,7 @@ pub mod query { .evaluate(&self.asset_definition_id) .wrap_err("Failed to get asset definition id") .map_err(|e| Error::Evaluate(e.to_string()))?; - let domain = wsv.domain(&domain_id)?; + let domain = wsv.world.domain(&domain_id)?; let _definition = domain .asset_definitions .get(&asset_definition_id) @@ -631,16 +650,17 @@ pub mod query { impl ValidQuery for FindAssetQuantityById { #[metrics(+"find_asset_quantity_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); let value = wsv + .world .asset(&id) .map_err(|asset_err| { - if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { + if let Err(definition_err) = wsv.world.asset_definition(&id.definition_id) { Error::Find(definition_err) } else { asset_err @@ -655,20 +675,20 @@ pub mod query { impl ValidQuery for FindTotalAssetQuantityByAssetDefinitionId { #[metrics(+"find_total_asset_quantity_by_asset_definition_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset definition id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - let asset_value = wsv.asset_total_amount(&id)?; + let asset_value = wsv.world.asset_total_amount(&id)?; Ok(asset_value) } } impl ValidQuery for FindAssetKeyValueByIdAndKey { #[metrics(+"find_asset_key_value_by_id_and_key")] - fn execute(&self, wsv: &WorldStateView) -> Result 
{ + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset id") @@ -677,8 +697,8 @@ pub mod query { .evaluate(&self.key) .wrap_err("Failed to get key") .map_err(|e| Error::Evaluate(e.to_string()))?; - let asset = wsv.asset(&id).map_err(|asset_err| { - if let Err(definition_err) = wsv.asset_definition(&id.definition_id) { + let asset = wsv.world.asset(&id).map_err(|asset_err| { + if let Err(definition_err) = wsv.world.asset_definition(&id.definition_id) { Error::Find(definition_err) } else { asset_err diff --git a/core/src/smartcontracts/isi/block.rs b/core/src/smartcontracts/isi/block.rs index 4f241372ef1..78c1a35bf22 100644 --- a/core/src/smartcontracts/isi/block.rs +++ b/core/src/smartcontracts/isi/block.rs @@ -11,12 +11,13 @@ use iroha_data_model::{ use iroha_telemetry::metrics; use super::*; +use crate::wsv::StateSnapshot; impl ValidQuery for FindAllBlocks { #[metrics(+"find_all_blocks")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, QueryExecutionFail> { Ok(Box::new( wsv.all_blocks() @@ -30,7 +31,7 @@ impl ValidQuery for FindAllBlockHeaders { #[metrics(+"find_all_block_headers")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, QueryExecutionFail> { Ok(Box::new( wsv.all_blocks() @@ -42,7 +43,7 @@ impl ValidQuery for FindAllBlockHeaders { impl ValidQuery for FindBlockHeaderByHash { #[metrics(+"find_block_header")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let hash = wsv .evaluate(&self.hash) .wrap_err("Failed to evaluate hash") diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs index b7930106a04..27616b1e616 100644 --- a/core/src/smartcontracts/isi/domain.rs +++ b/core/src/smartcontracts/isi/domain.rs @@ -42,7 +42,11 @@ pub mod isi { impl Execute for Register { 
#[metrics(+"register_account")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account: Account = self.object.build(authority); let account_id = account.id().clone(); @@ -51,7 +55,7 @@ pub mod isi { .validate_len(wsv.config.ident_length_limits) .map_err(Error::from)?; - let domain = wsv.domain_mut(&account_id.domain_id)?; + let domain = wsv.world.domain_mut(&account_id.domain_id)?; if domain.accounts.get(&account_id).is_some() { return Err(RepetitionError { instruction_type: InstructionType::Register, @@ -61,7 +65,8 @@ pub mod isi { } domain.add_account(account.clone()); - wsv.emit_events(Some(DomainEvent::Account(AccountEvent::Created(account)))); + wsv.world + .emit_events(Some(DomainEvent::Account(AccountEvent::Created(account)))); Ok(()) } @@ -69,17 +74,22 @@ pub mod isi { impl Execute for Unregister { #[metrics(+"unregister_account")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let account_id = self.object_id; - let domain = wsv.domain_mut(&account_id.domain_id)?; + let domain = wsv.world.domain_mut(&account_id.domain_id)?; if domain.remove_account(&account_id).is_none() { return Err(FindError::Account(account_id).into()); } - wsv.emit_events(Some(DomainEvent::Account(AccountEvent::Deleted( - account_id, - )))); + wsv.world + .emit_events(Some(DomainEvent::Account(AccountEvent::Deleted( + account_id, + )))); Ok(()) } @@ -87,7 +97,11 @@ pub mod isi { impl Execute for Register { #[metrics(+"register_asset_definition")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_definition = 
self.object.build(authority); asset_definition .id() @@ -96,7 +110,7 @@ pub mod isi { .map_err(Error::from)?; let asset_definition_id = asset_definition.id().clone(); - let domain = wsv.domain_mut(&asset_definition_id.domain_id)?; + let domain = wsv.world.domain_mut(&asset_definition_id.domain_id)?; if domain.asset_definitions.get(&asset_definition_id).is_some() { return Err(RepetitionError { instruction_type: InstructionType::Register, @@ -123,7 +137,7 @@ pub mod isi { domain.add_asset_definition(asset_definition.clone()); - wsv.emit_events(Some(DomainEvent::AssetDefinition( + wsv.world.emit_events(Some(DomainEvent::AssetDefinition( AssetDefinitionEvent::Created(asset_definition), ))); @@ -133,11 +147,15 @@ pub mod isi { impl Execute for Unregister { #[metrics(+"unregister_asset_definition")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_definition_id = self.object_id; let mut assets_to_remove = Vec::new(); - for domain in wsv.domains().values() { + for domain in wsv.world.domains() { for account in domain.accounts.values() { assets_to_remove.extend( account @@ -159,6 +177,7 @@ pub mod isi { for asset_id in assets_to_remove { let account_id = asset_id.account_id.clone(); if wsv + .world .account_mut(&account_id)? 
.remove_asset(&asset_id) .is_none() @@ -169,7 +188,7 @@ pub mod isi { events.push(AccountEvent::Asset(AssetEvent::Deleted(asset_id)).into()); } - let domain = wsv.domain_mut(&asset_definition_id.domain_id)?; + let domain = wsv.world.domain_mut(&asset_definition_id.domain_id)?; if domain .remove_asset_definition(&asset_definition_id) .is_none() @@ -183,7 +202,7 @@ pub mod isi { AssetDefinitionEvent::Deleted(asset_definition_id), ))); - wsv.emit_events(events); + wsv.world.emit_events(events); Ok(()) } @@ -191,11 +210,16 @@ pub mod isi { impl Execute for SetKeyValue { #[metrics(+"set_key_value_asset_definition")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_definition_id = self.object_id; let metadata_limits = wsv.config.asset_definition_metadata_limits; - wsv.asset_definition_mut(&asset_definition_id) + wsv.world + .asset_definition_mut(&asset_definition_id) .map_err(Error::from) .and_then(|asset_definition| { asset_definition @@ -204,13 +228,14 @@ pub mod isi { .map_err(Error::from) })?; - wsv.emit_events(Some(AssetDefinitionEvent::MetadataInserted( - MetadataChanged { - target_id: asset_definition_id, - key: self.key, - value: Box::new(self.value), - }, - ))); + wsv.world + .emit_events(Some(AssetDefinitionEvent::MetadataInserted( + MetadataChanged { + target_id: asset_definition_id, + key: self.key, + value: Box::new(self.value), + }, + ))); Ok(()) } @@ -218,25 +243,31 @@ pub mod isi { impl Execute for RemoveKeyValue { #[metrics(+"remove_key_value_asset_definition")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let asset_definition_id = self.object_id; - let value = - wsv.asset_definition_mut(&asset_definition_id) - 
.and_then(|asset_definition| { - asset_definition - .metadata - .remove(&self.key) - .ok_or_else(|| FindError::MetadataKey(self.key.clone())) - })?; - - wsv.emit_events(Some(AssetDefinitionEvent::MetadataRemoved( - MetadataChanged { - target_id: asset_definition_id, - key: self.key, - value: Box::new(value), - }, - ))); + let value = wsv + .world + .asset_definition_mut(&asset_definition_id) + .and_then(|asset_definition| { + asset_definition + .metadata + .remove(&self.key) + .ok_or_else(|| FindError::MetadataKey(self.key.clone())) + })?; + + wsv.world + .emit_events(Some(AssetDefinitionEvent::MetadataRemoved( + MetadataChanged { + target_id: asset_definition_id, + key: self.key, + value: Box::new(value), + }, + ))); Ok(()) } @@ -244,21 +275,26 @@ pub mod isi { impl Execute for SetKeyValue { #[metrics(+"set_domain_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let domain_id = self.object_id; let limits = wsv.config.domain_metadata_limits; - let domain = wsv.domain_mut(&domain_id)?; + let domain = wsv.world.domain_mut(&domain_id)?; domain .metadata .insert_with_limits(self.key.clone(), self.value.clone(), limits)?; - wsv.emit_events(Some(DomainEvent::MetadataInserted(MetadataChanged { - target_id: domain_id, - key: self.key, - value: Box::new(self.value), - }))); + wsv.world + .emit_events(Some(DomainEvent::MetadataInserted(MetadataChanged { + target_id: domain_id, + key: self.key, + value: Box::new(self.value), + }))); Ok(()) } @@ -266,33 +302,43 @@ pub mod isi { impl Execute for RemoveKeyValue { #[metrics(+"remove_domain_key_value")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let domain_id = self.object_id; - let domain = 
wsv.domain_mut(&domain_id)?; + let domain = wsv.world.domain_mut(&domain_id)?; let value = domain .metadata .remove(&self.key) .ok_or_else(|| FindError::MetadataKey(self.key.clone()))?; - wsv.emit_events(Some(DomainEvent::MetadataRemoved(MetadataChanged { - target_id: domain_id, - key: self.key, - value: Box::new(value), - }))); + wsv.world + .emit_events(Some(DomainEvent::MetadataRemoved(MetadataChanged { + target_id: domain_id, + key: self.key, + value: Box::new(value), + }))); Ok(()) } } impl Execute for Transfer { - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - wsv.domain_mut(&self.object)?.owned_by = self.destination_id.clone(); - - wsv.emit_events(Some(DomainEvent::OwnerChanged(DomainOwnerChanged { - domain_id: self.object, - new_owner: self.destination_id, - }))); + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { + wsv.world.domain_mut(&self.object)?.owned_by = self.destination_id.clone(); + + wsv.world + .emit_events(Some(DomainEvent::OwnerChanged(DomainOwnerChanged { + domain_id: self.object, + new_owner: self.destination_id, + }))); Ok(()) } @@ -308,32 +354,33 @@ pub mod query { }; use super::*; + use crate::wsv::StateSnapshot; impl ValidQuery for FindAllDomains { #[metrics(+"find_all_domains")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv.domains().values().cloned())) + Ok(Box::new(wsv.world.domains().cloned())) } } impl ValidQuery for FindDomainById { #[metrics(+"find_domain_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get domain id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id); - Ok(wsv.domain(&id)?.clone()) + Ok(wsv.world.domain(&id)?.clone()) } } impl ValidQuery for FindDomainKeyValueByIdAndKey { 
#[metrics(+"find_domain_key_value_by_id_and_key")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get domain id") @@ -343,7 +390,8 @@ pub mod query { .wrap_err("Failed to get key") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id, %key); - wsv.map_domain(&id, |domain| domain.metadata.get(&key).map(Clone::clone))? + wsv.world + .map_domain(&id, |domain| domain.metadata.get(&key).map(Clone::clone))? .ok_or_else(|| FindError::MetadataKey(key).into()) .map(Into::into) } @@ -351,7 +399,7 @@ pub mod query { impl ValidQuery for FindAssetDefinitionKeyValueByIdAndKey { #[metrics(+"find_asset_definition_key_value_by_id_and_key")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .wrap_err("Failed to get asset definition id") @@ -362,6 +410,7 @@ pub mod query { .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%id, %key); Ok(wsv + .world .asset_definition(&id)? .metadata .get(&key) diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index 7f80bbfade1..c9587f52a8d 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -20,7 +20,7 @@ use iroha_logger::prelude::{Span, *}; use iroha_primitives::fixed::Fixed; use super::Execute; -use crate::{prelude::*, wsv::WorldStateView}; +use crate::{prelude::*, wsv::StateTransaction}; /// Trait for proxy objects used for registration. pub trait Registrable { @@ -32,7 +32,11 @@ pub trait Registrable { } impl Execute for InstructionExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { iroha_logger::debug!(isi=%self, "Executing"); macro_rules! 
match_all { @@ -69,7 +73,11 @@ impl Execute for InstructionExpr { impl Execute for RegisterExpr { #[iroha_logger::log(name = "register", skip_all, fields(id))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let object_id = wsv.evaluate(&self.object)?; Span::current().record("id", &object_id.to_string()); match object_id { @@ -92,7 +100,11 @@ impl Execute for RegisterExpr { impl Execute for UnregisterExpr { #[iroha_logger::log(name = "unregister", skip_all, fields(id))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let object_id = wsv.evaluate(&self.object_id)?; Span::current().record("id", &object_id.to_string()); match object_id { @@ -120,7 +132,11 @@ impl Execute for UnregisterExpr { impl Execute for MintExpr { #[iroha_logger::log(name = "Mint", skip_all, fields(destination))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("destination", &destination_id.to_string()); @@ -175,7 +191,11 @@ impl Execute for MintExpr { impl Execute for BurnExpr { #[iroha_logger::log(name = "burn", skip_all, fields(destination))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("destination", &destination_id.to_string()); @@ -221,7 
+241,11 @@ impl Execute for BurnExpr { impl Execute for TransferExpr { #[iroha_logger::log(name = "transfer", skip_all, fields(from, to))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let source_id = wsv.evaluate(&self.source_id)?; let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; @@ -281,7 +305,11 @@ impl Execute for TransferExpr { } impl Execute for SetKeyValueExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let key = wsv.evaluate(&self.key)?; let value = wsv.evaluate(&self.value)?; iroha_logger::trace!(?key, ?value, %authority); @@ -316,7 +344,11 @@ impl Execute for SetKeyValueExpr { } impl Execute for RemoveKeyValueExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let key = wsv.evaluate(&self.key)?; iroha_logger::trace!(?key, %authority); match wsv.evaluate(&self.object_id)? { @@ -338,7 +370,11 @@ impl Execute for RemoveKeyValueExpr { } impl Execute for ConditionalExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { iroha_logger::trace!(?self); if wsv.evaluate(&self.condition)? 
{ self.then.execute(authority, wsv)?; @@ -350,7 +386,11 @@ impl Execute for ConditionalExpr { } impl Execute for PairExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { iroha_logger::trace!(?self); self.left_instruction.execute(authority, wsv)?; @@ -361,7 +401,11 @@ impl Execute for PairExpr { impl Execute for SequenceExpr { #[iroha_logger::log(skip_all, name = "Sequence", fields(count))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { Span::current().record("count", self.instructions.len()); for instruction in self.instructions { iroha_logger::trace!(%instruction); @@ -372,7 +416,11 @@ impl Execute for SequenceExpr { } impl Execute for Fail { - fn execute(self, _authority: &AccountId, _wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + _wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { iroha_logger::trace!(?self); Err(Error::Fail(self.message)) @@ -381,7 +429,11 @@ impl Execute for Fail { impl Execute for GrantExpr { #[iroha_logger::log(name = "grant", skip_all, fields(object))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("object", &object.to_string()); @@ -404,7 +456,11 @@ impl Execute for GrantExpr { impl Execute for RevokeExpr { #[iroha_logger::log(name = "revoke", skip_all, fields(object))] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: 
&AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("object", &object.to_string()); @@ -426,21 +482,33 @@ impl Execute for RevokeExpr { } impl Execute for SetParameterExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let parameter = wsv.evaluate(&self.parameter)?; SetParameter { parameter }.execute(authority, wsv) } } impl Execute for NewParameterExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let parameter = wsv.evaluate(&self.parameter)?; NewParameter { parameter }.execute(authority, wsv) } } impl Execute for UpgradeExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let object = wsv.evaluate(&self.object)?; match object { UpgradableBox::Executor(object) => { @@ -451,7 +519,11 @@ impl Execute for UpgradeExpr { } impl Execute for LogExpr { - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let level = wsv.evaluate(&self.level)?; let msg = wsv.evaluate(&self.msg)?; @@ -473,29 +545,40 @@ mod tests { use tokio::test; use super::*; - use crate::{kura::Kura, query::store::LiveQueryStore, wsv::World, PeersIds}; - - fn wsv_with_test_domains(kura: &Arc) -> Result { + use crate::{ + kura::Kura, + query::store::LiveQueryStore, + wsv::{State, World}, + PeersIds, + }; + + fn wsv_with_test_domains(kura: &Arc) -> 
Result { let world = World::with([], PeersIds::new()); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, kura.clone(), query_handle); + let wsv = State::new(world, kura.clone(), query_handle); let genesis_account_id = AccountId::from_str("genesis@genesis")?; let account_id = AccountId::from_str("alice@wonderland")?; let (public_key, _) = KeyPair::generate()?.into(); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; + let mut wsv_block = wsv.block(false); + let mut transaction = wsv_block.transaction(); RegisterExpr::new(Domain::new(DomainId::from_str("wonderland")?)) - .execute(&genesis_account_id, &mut wsv)?; + .execute(&genesis_account_id, &mut transaction)?; RegisterExpr::new(Account::new(account_id, [public_key])) - .execute(&genesis_account_id, &mut wsv)?; + .execute(&genesis_account_id, &mut transaction)?; RegisterExpr::new(AssetDefinition::store(asset_definition_id)) - .execute(&genesis_account_id, &mut wsv)?; + .execute(&genesis_account_id, &mut transaction)?; + transaction.apply(); + wsv_block.commit(); Ok(wsv) } #[test] async fn asset_store() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); @@ -504,8 +587,8 @@ mod tests { Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) - .execute(&account_id, &mut wsv)?; - let asset = wsv.asset(&asset_id)?; + .execute(&account_id, &mut wsv_transaction)?; + let asset = wsv_transaction.world.asset(&asset_id)?; let metadata: &Metadata = asset.try_as_ref()?; let bytes = metadata .get(&Name::from_str("Bytes").expect("Valid")) @@ -524,7 +607,9 @@ mod tests { 
#[test] async fn account_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv = wsv_block.transaction(); let account_id = AccountId::from_str("alice@wonderland")?; SetKeyValueExpr::new( IdBox::from(account_id.clone()), @@ -532,7 +617,7 @@ mod tests { vec![1_u32, 2_u32, 3_u32], ) .execute(&account_id, &mut wsv)?; - let bytes = wsv.map_account(&account_id, |account| { + let bytes = wsv.world.map_account(&account_id, |account| { account .metadata() .get(&Name::from_str("Bytes").expect("Valid")) @@ -552,7 +637,9 @@ mod tests { #[test] async fn asset_definition_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; SetKeyValueExpr::new( @@ -560,8 +647,9 @@ mod tests { Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) - .execute(&account_id, &mut wsv)?; - let bytes = wsv + .execute(&account_id, &mut wsv_transaction)?; + let bytes = wsv_transaction + .world .asset_definition(&definition_id)? .metadata() .get(&Name::from_str("Bytes")?) 
@@ -580,7 +668,9 @@ mod tests { #[test] async fn domain_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let domain_id = DomainId::from_str("wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; SetKeyValueExpr::new( @@ -588,8 +678,9 @@ mod tests { Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], ) - .execute(&account_id, &mut wsv)?; - let bytes = wsv + .execute(&account_id, &mut wsv_transaction)?; + let bytes = wsv_transaction + .world .domain(&domain_id)? .metadata() .get(&Name::from_str("Bytes")?) @@ -608,13 +699,15 @@ mod tests { #[test] async fn executing_unregistered_trigger_should_return_error() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let account_id = AccountId::from_str("alice@wonderland")?; let trigger_id = TriggerId::from_str("test_trigger_id")?; assert!(matches!( ExecuteTriggerExpr::new(trigger_id) - .execute(&account_id, &mut wsv) + .execute(&account_id, &mut wsv_transaction) .expect_err("Error expected"), Error::Find(_) )); @@ -625,7 +718,9 @@ mod tests { #[test] async fn unauthorized_trigger_execution_should_return_error() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = wsv_with_test_domains(&kura)?; + let wsv = wsv_with_test_domains(&kura)?; + let mut wsv_block = wsv.block(false); + let mut wsv_transaction = wsv_block.transaction(); let account_id = AccountId::from_str("alice@wonderland")?; let fake_account_id = AccountId::from_str("fake@wonderland")?; let trigger_id = TriggerId::from_str("test_trigger_id")?; @@ -636,7 +731,7 @@ mod tests { .into(); let register_account = 
RegisterExpr::new(Account::new(fake_account_id.clone(), [public_key])); - register_account.execute(&account_id, &mut wsv)?; + register_account.execute(&account_id, &mut wsv_transaction)?; // register the trigger let register_trigger = RegisterExpr::new(Trigger::new( @@ -652,15 +747,15 @@ mod tests { ), )); - register_trigger.execute(&account_id, &mut wsv)?; + register_trigger.execute(&account_id, &mut wsv_transaction)?; // execute with the valid account - ExecuteTriggerExpr::new(trigger_id.clone()).execute(&account_id, &mut wsv)?; + ExecuteTriggerExpr::new(trigger_id.clone()).execute(&account_id, &mut wsv_transaction)?; // execute with the fake account assert!(matches!( ExecuteTriggerExpr::new(trigger_id) - .execute(&fake_account_id, &mut wsv) + .execute(&fake_account_id, &mut wsv_transaction) .expect_err("Error expected"), Error::InvariantViolation(_) )); diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index 19671e06587..03f20d3da3c 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -4,7 +4,7 @@ use eyre::Result; use iroha_data_model::{prelude::*, query::error::QueryExecutionFail as Error}; use parity_scale_codec::{Decode, Encode}; -use crate::{prelude::ValidQuery, WorldStateView}; +use crate::{prelude::ValidQuery, wsv::StateSnapshot}; /// Represents lazy evaluated query output pub trait Lazy { @@ -62,8 +62,9 @@ impl ValidQueryRequest { /// - Account doesn't exist /// - Account doesn't have the correct public key /// - Account has incorrect permissions - pub fn validate(query: SignedQuery, wsv: &WorldStateView) -> Result { + pub fn validate(query: SignedQuery, wsv: &StateSnapshot<'_>) -> Result { let account_has_public_key = wsv + .world .map_account(query.authority(), |account| { account.signatories.contains(query.signature().public_key()) }) @@ -74,16 +75,20 @@ impl ValidQueryRequest { )) .into()); } - wsv.executor() + wsv.world + .executor .validate_query(wsv, 
query.authority(), query.query().clone())?; Ok(Self(query)) } - /// Execute contained query on the [`WorldStateView`]. + /// Execute contained query on the [`StateSnapshot`]. /// /// # Errors /// Forwards `self.query.execute` error. - pub fn execute<'wsv>(&'wsv self, wsv: &'wsv WorldStateView) -> Result, Error> { + pub fn execute<'wsv>( + &'wsv self, + wsv: &'wsv StateSnapshot<'wsv>, + ) -> Result, Error> { let value = self.0.query().execute(wsv)?; Ok(if let LazyValue::Iter(iter) = value { @@ -103,7 +108,7 @@ impl ValidQueryRequest { } impl ValidQuery for QueryBox { - fn execute<'wsv>(&self, wsv: &'wsv WorldStateView) -> Result, Error> { + fn execute<'wsv>(&self, wsv: &'wsv StateSnapshot<'wsv>) -> Result, Error> { iroha_logger::debug!(query=%self, "Executing"); macro_rules! match_all { @@ -175,8 +180,14 @@ mod tests { use super::*; use crate::{ - block::*, kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, - sumeragi::network_topology::Topology, tx::AcceptedTransaction, wsv::World, PeersIds, + block::*, + kura::Kura, + query::store::LiveQueryStore, + smartcontracts::isi::Registrable as _, + sumeragi::network_topology::Topology, + tx::AcceptedTransaction, + wsv::{State, World}, + PeersIds, }; static ALICE_KEYS: Lazy = Lazy::new(|| KeyPair::generate().unwrap()); @@ -248,59 +259,62 @@ mod tests { blocks: u64, valid_tx_per_block: usize, invalid_tx_per_block: usize, - ) -> Result { + ) -> Result { let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone(), query_handle); - - let limits = TransactionLimits { - max_instruction_number: 1, - max_wasm_size_bytes: 0, - }; - let huge_limits = TransactionLimits { - max_instruction_number: 1000, - max_wasm_size_bytes: 0, - }; - - wsv.config.transaction_limits = limits; - - let valid_tx = { - let instructions: [InstructionExpr; 0] = []; - let tx = TransactionBuilder::new(ALICE_ID.clone()) - 
.with_instructions(instructions) - .sign(ALICE_KEYS.clone())?; - AcceptedTransaction::accept(tx, &limits)? - }; - let invalid_tx = { - let isi = Fail::new("fail"); - let tx = TransactionBuilder::new(ALICE_ID.clone()) - .with_instructions([isi.clone(), isi]) - .sign(ALICE_KEYS.clone())?; - AcceptedTransaction::accept(tx, &huge_limits)? - }; + let wsv = State::new(world_with_test_domains(), kura.clone(), query_handle); + { + let mut wsv = wsv.block(false); + let limits = TransactionLimits { + max_instruction_number: 1, + max_wasm_size_bytes: 0, + }; + let huge_limits = TransactionLimits { + max_instruction_number: 1000, + max_wasm_size_bytes: 0, + }; - let mut transactions = vec![valid_tx; valid_tx_per_block]; - transactions.append(&mut vec![invalid_tx; invalid_tx_per_block]); + wsv.config.transaction_limits = limits; - let topology = Topology::new(UniqueVec::new()); - let first_block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) - .chain(0, &mut wsv) - .sign(ALICE_KEYS.clone())? - .commit(&topology) - .expect("Block is valid"); + let valid_tx = { + let instructions: [InstructionExpr; 0] = []; + let tx = TransactionBuilder::new(ALICE_ID.clone()) + .with_instructions(instructions) + .sign(ALICE_KEYS.clone())?; + AcceptedTransaction::accept(tx, &limits)? + }; + let invalid_tx = { + let isi = Fail::new("fail"); + let tx = TransactionBuilder::new(ALICE_ID.clone()) + .with_instructions([isi.clone(), isi]) + .sign(ALICE_KEYS.clone())?; + AcceptedTransaction::accept(tx, &huge_limits)? 
+ }; - wsv.apply(&first_block)?; - kura.store_block(first_block); + let mut transactions = vec![valid_tx; valid_tx_per_block]; + transactions.append(&mut vec![invalid_tx; invalid_tx_per_block]); - for _ in 1u64..blocks { - let block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) + let topology = Topology::new(UniqueVec::new()); + let first_block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) .chain(0, &mut wsv) .sign(ALICE_KEYS.clone())? .commit(&topology) .expect("Block is valid"); - wsv.apply(&block)?; - kura.store_block(block); + wsv.apply(&first_block)?; + kura.store_block(first_block); + + for _ in 1u64..blocks { + let block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) + .chain(0, &mut wsv) + .sign(ALICE_KEYS.clone())? + .commit(&topology) + .expect("Block is valid"); + + wsv.apply(&block)?; + kura.store_block(block); + } + wsv.commit(); } Ok(wsv) @@ -310,12 +324,12 @@ mod tests { async fn asset_store() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = WorldStateView::new(world_with_test_asset_with_metadata(), kura, query_handle); + let wsv = State::new(world_with_test_asset_with_metadata(), kura, query_handle); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, ALICE_ID.clone()); - let bytes = - FindAssetKeyValueByIdAndKey::new(asset_id, Name::from_str("Bytes")?).execute(&wsv)?; + let bytes = FindAssetKeyValueByIdAndKey::new(asset_id, Name::from_str("Bytes")?) 
+ .execute(&wsv.view().to_snapshot())?; assert_eq!( Value::Vec(vec![1_u32.to_value(), 2_u32.to_value(), 3_u32.to_value()]), bytes.into(), @@ -327,10 +341,10 @@ mod tests { async fn account_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let wsv = WorldStateView::new(world_with_test_account_with_metadata()?, kura, query_handle); + let wsv = State::new(world_with_test_account_with_metadata()?, kura, query_handle); let bytes = FindAccountKeyValueByIdAndKey::new(ALICE_ID.clone(), Name::from_str("Bytes")?) - .execute(&wsv)?; + .execute(&wsv.view().to_snapshot())?; assert_eq!( Value::Vec(vec![1_u32.to_value(), 2_u32.to_value(), 3_u32.to_value()]), bytes.into(), @@ -343,7 +357,9 @@ mod tests { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; - let blocks = FindAllBlocks.execute(&wsv)?.collect::>(); + let blocks = FindAllBlocks + .execute(&wsv.view().to_snapshot())? + .collect::>(); assert_eq!(blocks.len() as u64, num_blocks); assert!(blocks.windows(2).all(|wnd| wnd[0] >= wnd[1])); @@ -356,7 +372,9 @@ mod tests { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; - let block_headers = FindAllBlockHeaders.execute(&wsv)?.collect::>(); + let block_headers = FindAllBlockHeaders + .execute(&wsv.view().to_snapshot())? 
+ .collect::>(); assert_eq!(block_headers.len() as u64, num_blocks); assert!(block_headers.windows(2).all(|wnd| wnd[0] >= wnd[1])); @@ -367,16 +385,18 @@ mod tests { #[test] async fn find_block_header_by_hash() -> Result<()> { let wsv = wsv_with_test_blocks_and_transactions(1, 1, 1)?; - let block = wsv.all_blocks().last().expect("WSV is empty"); + let view = wsv.view(); + let snapshot = view.to_snapshot(); + let block = snapshot.all_blocks().last().expect("WSV is empty"); assert_eq!( - FindBlockHeaderByHash::new(block.hash()).execute(&wsv)?, + FindBlockHeaderByHash::new(block.hash()).execute(&snapshot)?, block.payload().header ); assert!( FindBlockHeaderByHash::new(HashOf::from_untyped_unchecked(Hash::new([42]))) - .execute(&wsv) + .execute(&snapshot) .is_err() ); @@ -388,7 +408,9 @@ mod tests { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; - let txs = FindAllTransactions.execute(&wsv)?.collect::>(); + let view = wsv.view(); + let snapshot = view.to_snapshot(); + let txs = FindAllTransactions.execute(&snapshot)?.collect::>(); assert_eq!(txs.len() as u64, num_blocks * 2); assert_eq!( @@ -411,25 +433,30 @@ mod tests { async fn find_transaction() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone(), query_handle); + let wsv = State::new(world_with_test_domains(), kura.clone(), query_handle); + let mut block = wsv.block(false); let instructions: [InstructionExpr; 0] = []; let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions(instructions) .sign(ALICE_KEYS.clone())?; - let tx_limits = &wsv.transaction_executor().transaction_limits; + let tx_limits = &block.transaction_executor().transaction_limits; let va_tx = AcceptedTransaction::accept(tx, tx_limits)?; let topology = Topology::new(UniqueVec::new()); let vcb = BlockBuilder::new(vec![va_tx.clone()], topology.clone(), Vec::new()) - 
.chain(0, &mut wsv) + .chain(0, &mut block) .sign(ALICE_KEYS.clone())? .commit(&topology) .expect("Block is valid"); - wsv.apply(&vcb)?; + block.apply(&vcb)?; kura.store_block(vcb); + block.commit(); + + let view = wsv.view(); + let snapshot = view.to_snapshot(); let unapplied_tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions([UnregisterExpr::new( @@ -437,13 +464,13 @@ mod tests { )]) .sign(ALICE_KEYS.clone())?; let wrong_hash = unapplied_tx.hash(); - let not_found = FindTransactionByHash::new(wrong_hash).execute(&wsv); + let not_found = FindTransactionByHash::new(wrong_hash).execute(&snapshot); assert!(matches!( not_found, Err(Error::Find(FindError::Transaction(_))) )); - let found_accepted = FindTransactionByHash::new(va_tx.hash()).execute(&wsv)?; + let found_accepted = FindTransactionByHash::new(va_tx.hash()).execute(&snapshot)?; if found_accepted.transaction.error.is_none() { assert_eq!(va_tx.hash(), found_accepted.transaction.hash()) } @@ -473,12 +500,13 @@ mod tests { ) .is_none()); let query_handle = LiveQueryStore::test().start(); - WorldStateView::new(World::with([domain], PeersIds::new()), kura, query_handle) + State::new(World::with([domain], PeersIds::new()), kura, query_handle) }; let domain_id = DomainId::from_str("wonderland")?; let key = Name::from_str("Bytes")?; - let bytes = FindDomainKeyValueByIdAndKey::new(domain_id, key).execute(&wsv)?; + let bytes = + FindDomainKeyValueByIdAndKey::new(domain_id, key).execute(&wsv.view().to_snapshot())?; assert_eq!( Value::Vec(vec![1_u32.to_value(), 2_u32.to_value(), 3_u32.to_value()]), bytes.into(), diff --git a/core/src/smartcontracts/isi/triggers/mod.rs b/core/src/smartcontracts/isi/triggers/mod.rs index 7c814b6fe47..02f1042c335 100644 --- a/core/src/smartcontracts/isi/triggers/mod.rs +++ b/core/src/smartcontracts/isi/triggers/mod.rs @@ -24,7 +24,11 @@ pub mod isi { impl Execute for Register> { #[metrics(+"register_trigger")] - fn execute(self, _authority: &AccountId, wsv: &mut 
WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let new_trigger = self.object; if !new_trigger.action.filter.mintable() { @@ -37,7 +41,7 @@ pub mod isi { } let engine = wsv.engine.clone(); // Cloning engine is cheap - let triggers = wsv.triggers_mut(); + let triggers = &mut wsv.world.triggers; let trigger_id = new_trigger.id().clone(); let success = match &new_trigger.action.filter { TriggeringFilterBox::Data(_) => triggers.add_data_trigger( @@ -75,7 +79,8 @@ pub mod isi { .into()); } - wsv.emit_events(Some(TriggerEvent::Created(trigger_id))); + wsv.world + .emit_events(Some(TriggerEvent::Created(trigger_id))); Ok(()) } @@ -83,12 +88,17 @@ pub mod isi { impl Execute for Unregister> { #[metrics(+"unregister_trigger")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let trigger_id = self.object_id.clone(); - let triggers = wsv.triggers_mut(); + let triggers = &mut wsv.world.triggers; if triggers.remove(&trigger_id) { - wsv.emit_events(Some(TriggerEvent::Deleted(self.object_id))); + wsv.world + .emit_events(Some(TriggerEvent::Deleted(self.object_id))); Ok(()) } else { Err(RepetitionError { @@ -102,10 +112,14 @@ pub mod isi { impl Execute for Mint> { #[metrics(+"mint_trigger_repetitions")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let id = self.destination_id; - let triggers = wsv.triggers_mut(); + let triggers = &mut wsv.world.triggers; triggers .inspect_by_id(&id, |action| -> Result<(), Error> { if action.mintable() { @@ -121,7 +135,7 @@ pub mod isi { .ok_or(super::set::RepeatsOverflowError) })?; - wsv.emit_events(Some(TriggerEvent::Extended( + 
wsv.world.emit_events(Some(TriggerEvent::Extended( TriggerNumberOfExecutionsChanged { trigger_id: id, by: self.object, @@ -134,16 +148,20 @@ pub mod isi { impl Execute for Burn> { #[metrics(+"burn_trigger_repetitions")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let trigger = self.destination_id; - let triggers = wsv.triggers_mut(); + let triggers = &mut wsv.world.triggers; triggers.mod_repeats(&trigger, |n| { n.checked_sub(self.object) .ok_or(super::set::RepeatsOverflowError) })?; // TODO: Is it okay to remove triggers with 0 repeats count from `TriggerSet` only // when they will match some of the events? - wsv.emit_events(Some(TriggerEvent::Shortened( + wsv.world.emit_events(Some(TriggerEvent::Shortened( TriggerNumberOfExecutionsChanged { trigger_id: trigger, by: self.object, @@ -156,10 +174,15 @@ pub mod isi { impl Execute for ExecuteTriggerExpr { #[metrics(+"execute_trigger")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let id = wsv.evaluate(&self.trigger_id)?; - wsv.triggers() + wsv.world + .triggers .inspect_by_id(&id, |action| -> Result<(), Error> { let allow_execute = if let TriggeringFilterBox::ExecuteTrigger(filter) = action.clone_and_box().filter @@ -186,7 +209,7 @@ pub mod isi { .ok_or_else(|| Error::Find(Box::new(FindError::Trigger(id.clone())))) .and_then(core::convert::identity)?; - wsv.execute_trigger(id, authority); + wsv.world.execute_trigger(id, authority); Ok(()) } @@ -202,21 +225,21 @@ pub mod query { }; use super::*; - use crate::prelude::*; + use crate::{prelude::*, wsv::StateSnapshot}; impl ValidQuery for FindAllActiveTriggerIds { #[metrics(+"find_all_active_triggers")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + 
wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv.triggers().ids().cloned())) + Ok(Box::new(wsv.world.triggers.ids().cloned())) } } impl ValidQuery for FindTriggerById { #[metrics(+"find_trigger_by_id")] - fn execute(&self, wsv: &WorldStateView) -> Result, Error> { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result, Error> { let id = wsv .evaluate(&self.id) .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {e}")))?; @@ -224,11 +247,12 @@ pub mod query { // Can't use just `LoadedActionTrait::clone_and_box` cause this will trigger lifetime mismatch #[allow(clippy::redundant_closure_for_method_calls)] let loaded_action = wsv - .triggers() + .world + .triggers .inspect_by_id(&id, |action| action.clone_and_box()) .ok_or_else(|| Error::Find(FindError::Trigger(id.clone())))?; - let action = wsv.triggers().get_original_action(loaded_action); + let action = wsv.world.triggers.get_original_action(loaded_action); // TODO: Should we redact the metadata if the account is not the authority/owner? Ok(Trigger::new(id, action)) @@ -237,7 +261,7 @@ pub mod query { impl ValidQuery for FindTriggerKeyValueByIdAndKey { #[metrics(+"find_trigger_key_value_by_id_and_key")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let id = wsv .evaluate(&self.id) .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {e}")))?; @@ -245,7 +269,8 @@ pub mod query { .evaluate(&self.key) .map_err(|e| Error::Evaluate(format!("Failed to evaluate key. 
{e}")))?; iroha_logger::trace!(%id, %key); - wsv.triggers() + wsv.world + .triggers .inspect_by_id(&id, |action| { action .metadata() @@ -262,7 +287,7 @@ pub mod query { #[metrics(+"find_triggers_by_domain_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> eyre::Result> + 'wsv>, Error> { let domain_id = wsv @@ -270,12 +295,13 @@ pub mod query { .map_err(|e| Error::Evaluate(format!("Failed to evaluate domain id. {e}")))?; Ok(Box::new( - wsv.triggers() + wsv.world + .triggers .inspect_by_domain_id(&domain_id, |trigger_id, action| { (trigger_id.clone(), action.clone_and_box()) }) .map(|(trigger_id, action)| { - let action = wsv.triggers().get_original_action(action); + let action = wsv.world.triggers.get_original_action(action); Trigger::new(trigger_id, action) }), )) diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs index 3cd20738837..7b9369505a2 100644 --- a/core/src/smartcontracts/isi/triggers/set.rs +++ b/core/src/smartcontracts/isi/triggers/set.rs @@ -27,7 +27,7 @@ use serde::{ }; use thiserror::Error; -use crate::{smartcontracts::wasm, wsv::WasmSeed}; +use crate::{smartcontracts::wasm, wsv::deserialize::WasmSeed}; /// Error type for [`Set`] operations. 
#[derive(Debug, Error, displaydoc::Display)] diff --git a/core/src/smartcontracts/isi/tx.rs b/core/src/smartcontracts/isi/tx.rs index b33fa69f7f5..820abe628df 100644 --- a/core/src/smartcontracts/isi/tx.rs +++ b/core/src/smartcontracts/isi/tx.rs @@ -17,6 +17,7 @@ use iroha_data_model::{ use iroha_telemetry::metrics; use super::*; +use crate::wsv::StateSnapshot; pub(crate) struct BlockTransactionIter(Arc, usize); pub(crate) struct BlockTransactionRef(Arc, usize); @@ -59,7 +60,7 @@ impl ValidQuery for FindAllTransactions { #[metrics(+"find_all_transactions")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, QueryExecutionFail> { Ok(Box::new( wsv.all_blocks() @@ -76,7 +77,7 @@ impl ValidQuery for FindTransactionsByAccountId { #[metrics(+"find_transactions_by_account_id")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, QueryExecutionFail> { let account_id = wsv .evaluate(&self.account_id) @@ -97,7 +98,10 @@ impl ValidQuery for FindTransactionsByAccountId { impl ValidQuery for FindTransactionByHash { #[metrics(+"find_transaction_by_hash")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute( + &self, + wsv: &StateSnapshot<'_>, + ) -> Result { let tx_hash = wsv .evaluate(&self.hash) .wrap_err("Failed to get hash") diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index 64199fd9eb8..e70d85d965d 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -29,10 +29,14 @@ pub mod isi { impl Execute for Register { #[metrics(+"register_peer")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let peer_id = self.object.id; - let world = wsv.world_mut(); + let world = &mut wsv.world; if 
!world.trusted_peers_ids.push(peer_id.clone()) { return Err(RepetitionError { instruction_type: InstructionType::Register, @@ -41,7 +45,7 @@ pub mod isi { .into()); } - wsv.emit_events(Some(PeerEvent::Added(peer_id))); + world.emit_events(Some(PeerEvent::Added(peer_id))); Ok(()) } @@ -49,16 +53,20 @@ pub mod isi { impl Execute for Unregister { #[metrics(+"unregister_peer")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let peer_id = self.object_id; - let world = wsv.world_mut(); + let world = &mut wsv.world; let Some(index) = world.trusted_peers_ids.iter().position(|id| id == &peer_id) else { return Err(FindError::Peer(peer_id).into()); }; world.trusted_peers_ids.remove(index); - wsv.emit_events(Some(PeerEvent::Removed(peer_id))); + world.emit_events(Some(PeerEvent::Removed(peer_id))); Ok(()) } @@ -66,7 +74,11 @@ pub mod isi { impl Execute for Register { #[metrics("register_domain")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let domain: Domain = self.object.build(authority); let domain_id = domain.id().clone(); @@ -75,8 +87,8 @@ pub mod isi { .validate_len(wsv.config.ident_length_limits) .map_err(Error::from)?; - let world = wsv.world_mut(); - if world.domains.contains_key(&domain_id) { + let world = &mut wsv.world; + if world.domains.get(&domain_id).is_some() { return Err(RepetitionError { instruction_type: InstructionType::Register, id: IdBox::DomainId(domain_id), @@ -86,7 +98,7 @@ pub mod isi { world.domains.insert(domain_id, domain.clone()); - wsv.emit_events(Some(DomainEvent::Created(domain))); + world.emit_events(Some(DomainEvent::Created(domain))); Ok(()) } @@ -94,15 +106,19 @@ pub mod isi { impl Execute for Unregister { 
#[metrics("unregister_domain")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let domain_id = self.object_id; - let world = wsv.world_mut(); - if world.domains.remove(&domain_id).is_none() { + let world = &mut wsv.world; + if world.domains.remove(domain_id.clone()).is_none() { return Err(FindError::Domain(domain_id).into()); } - wsv.emit_events(Some(DomainEvent::Deleted(domain_id))); + world.emit_events(Some(DomainEvent::Deleted(domain_id))); Ok(()) } @@ -110,10 +126,14 @@ pub mod isi { impl Execute for Register { #[metrics(+"register_role")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let role = self.object.build(authority); - if wsv.roles().contains_key(role.id()) { + if wsv.world.roles.get(role.id()).is_some() { return Err(RepetitionError { instruction_type: InstructionType::Register, id: IdBox::RoleId(role.id), @@ -123,7 +143,8 @@ pub mod isi { for permission in &role.permissions { if !wsv - .permission_token_schema() + .world + .permission_token_schema .token_ids .contains(&permission.definition_id) { @@ -131,11 +152,11 @@ pub mod isi { } } - let world = wsv.world_mut(); + let world = &mut wsv.world; let role_id = role.id().clone(); world.roles.insert(role_id, role.clone()); - wsv.emit_events(Some(RoleEvent::Created(role))); + world.emit_events(Some(RoleEvent::Created(role))); Ok(()) } @@ -143,13 +164,18 @@ pub mod isi { impl Execute for Unregister { #[metrics("unregister_role")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let role_id = self.object_id; let accounts_with_role = wsv .world 
.account_roles .iter() + .map(|(role, _)| role) .filter(|role| role.role_id.eq(&role_id)) .map(|role| &role.account_id) .cloned() @@ -163,12 +189,12 @@ pub mod isi { revoke.execute(authority, wsv)? } - let world = wsv.world_mut(); - if world.roles.remove(&role_id).is_none() { + let world = &mut wsv.world; + if world.roles.remove(role_id.clone()).is_none() { return Err(FindError::Role(role_id).into()); } - wsv.emit_events(Some(RoleEvent::Deleted(role_id))); + world.emit_events(Some(RoleEvent::Deleted(role_id))); Ok(()) } @@ -176,18 +202,22 @@ pub mod isi { impl Execute for SetParameter { #[metrics(+"set_parameter")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let parameter = self.parameter; let parameter_id = parameter.id.clone(); - let world = wsv.world_mut(); + let world = &mut wsv.world; if !world.parameters.remove(¶meter) { return Err(FindError::Parameter(parameter_id).into()); } world.parameters.insert(parameter); - wsv.emit_events(Some(ConfigurationEvent::Changed(parameter_id))); + world.emit_events(Some(ConfigurationEvent::Changed(parameter_id))); Ok(()) } @@ -195,11 +225,15 @@ pub mod isi { impl Execute for NewParameter { #[metrics(+"new_parameter")] - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + _authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let parameter = self.parameter; let parameter_id = parameter.id.clone(); - let world = wsv.world_mut(); + let world = &mut wsv.world; if !world.parameters.insert(parameter) { return Err(RepetitionError { instruction_type: InstructionType::NewParameter, @@ -208,7 +242,7 @@ pub mod isi { .into()); } - wsv.emit_events(Some(ConfigurationEvent::Created(parameter_id))); + world.emit_events(Some(ConfigurationEvent::Created(parameter_id))); Ok(()) } @@ -216,12 
+250,16 @@ pub mod isi { impl Execute for Upgrade { #[metrics(+"upgrade_executor")] - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + fn execute( + self, + authority: &AccountId, + wsv: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error> { let raw_executor = self.object; // Cloning executor to avoid multiple mutable borrows of `wsv`. // Also it's a cheap operation. - let mut upgraded_executor = wsv.executor().clone(); + let mut upgraded_executor = wsv.world.executor.clone(); upgraded_executor .migrate(raw_executor, wsv, authority) .map_err(|migration_error| { @@ -231,9 +269,10 @@ pub mod isi { )) })?; - wsv.world_mut().executor = upgraded_executor; + *wsv.world.executor.get_mut() = upgraded_executor; - wsv.emit_events(std::iter::once(ExecutorEvent::Upgraded)); + wsv.world + .emit_events(std::iter::once(ExecutorEvent::Upgraded)); Ok(()) } @@ -243,7 +282,7 @@ pub mod isi { fn execute( self, _authority: &AccountId, - _wsv: &mut WorldStateView, + _wsv: &mut StateTransaction<'_, '_>, ) -> std::result::Result<(), Error> { const TARGET: &str = "log_isi"; let Self { level, msg } = self; @@ -273,14 +312,17 @@ pub mod query { }; use super::*; + use crate::wsv::StateSnapshot; impl ValidQuery for FindAllRoles { #[metrics(+"find_all_roles")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv.world.roles.values().cloned())) + Ok(Box::new( + wsv.world.roles.iter().map(|(_, role)| role).cloned(), + )) } } @@ -288,21 +330,24 @@ pub mod query { #[metrics(+"find_all_role_ids")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv + Ok(Box::new( + wsv .world .roles - .values() + .iter() + .map(|(_, role)| role) // To me, this should probably be a method, not a field. 
.map(Role::id) - .cloned())) + .cloned(), + )) } } impl ValidQuery for FindRoleByRoleId { #[metrics(+"find_role_by_role_id")] - fn execute(&self, wsv: &WorldStateView) -> Result { + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { let role_id = wsv .evaluate(&self.id) .map_err(|e| Error::Evaluate(e.to_string()))?; @@ -319,16 +364,16 @@ pub mod query { #[metrics("find_all_peers")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv.peers().cloned().map(Peer::new))) + Ok(Box::new(wsv.world.peers().cloned().map(Peer::new))) } } impl ValidQuery for FindPermissionTokenSchema { #[metrics("find_permission_token_schema")] - fn execute(&self, wsv: &WorldStateView) -> Result { - Ok(wsv.permission_token_schema().clone()) + fn execute(&self, wsv: &StateSnapshot<'_>) -> Result { + Ok(wsv.world.permission_token_schema.clone()) } } @@ -336,9 +381,9 @@ pub mod query { #[metrics("find_all_parameters")] fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result + 'wsv>, Error> { - Ok(Box::new(wsv.parameters().cloned())) + Ok(Box::new(wsv.world.parameters().cloned())) } } } diff --git a/core/src/smartcontracts/mod.rs b/core/src/smartcontracts/mod.rs index 05d0195defd..438753b5526 100644 --- a/core/src/smartcontracts/mod.rs +++ b/core/src/smartcontracts/mod.rs @@ -1,6 +1,6 @@ //! Iroha smart contract functionality. Most of the traits mentioned //! [`isi`] or Iroha Special Instructions are the main way of -//! interacting with the [`WorldStateView`], even [`wasm`] based +//! interacting with the [`State`], even [`wasm`] based //! smart-contracts can only interact with the `world`, via //! instructions. 
@@ -16,15 +16,19 @@ use iroha_data_model::{ pub use isi::*; use self::query::{Lazy, LazyValue}; -use crate::wsv::WorldStateView; +use crate::wsv::{StateSnapshot, StateTransaction}; -/// Trait implementations should provide actions to apply changes on [`WorldStateView`]. +/// Trait implementations should provide actions to apply changes on [`StateTransaction`]. pub trait Execute { - /// Apply actions to `wsv` on behalf of `authority`. + /// Apply actions to `state_transaction` on behalf of `authority`. /// /// # Errors /// Concrete to each implementer. - fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error>; + fn execute( + self, + authority: &AccountId, + state_transaction: &mut StateTransaction<'_, '_>, + ) -> Result<(), Error>; } /// This trait should be implemented for all Iroha Queries. @@ -32,8 +36,7 @@ pub trait ValidQuery: iroha_data_model::query::Query where Self::Output: Lazy, { - /// Execute query on the [`WorldStateView`]. - /// Should not mutate [`WorldStateView`]! + /// Execute query on the [`WorldSnapshot`]. /// /// Returns Ok(QueryResult) if succeeded and Err(String) if failed. 
/// @@ -41,11 +44,11 @@ where /// Concrete to each implementer fn execute<'wsv>( &self, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, ) -> Result<::Lazy<'wsv>, QueryExecutionFail>; } -impl ExpressionEvaluator for WorldStateView { +impl ExpressionEvaluator for StateSnapshot<'_> { fn evaluate( &self, expression: &E, @@ -54,15 +57,24 @@ impl ExpressionEvaluator for WorldStateView { } } +impl ExpressionEvaluator for StateTransaction<'_, '_> { + fn evaluate( + &self, + expression: &E, + ) -> Result { + expression.evaluate(&Context::new(&self.to_snapshot())) + } +} + #[derive(Clone)] pub(crate) struct Context<'wsv> { values: BTreeMap, - wsv: &'wsv WorldStateView, + wsv: &'wsv StateSnapshot<'wsv>, } impl<'a> Context<'a> { /// Create new [`Self`] - pub fn new(wsv: &'a WorldStateView) -> Self { + pub fn new(wsv: &'a StateSnapshot<'a>) -> Self { Self { values: BTreeMap::new(), wsv, diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index 4b901950423..214197ed67b 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -2,6 +2,8 @@ //! `WebAssembly` VM Smartcontracts can be written in Rust, compiled //! to wasm format and submitted in a transaction +use std::borrow::Borrow; + use error::*; use import::traits::{ ExecuteOperations as _, GetExecutorPayloads as _, SetPermissionTokenSchema as _, @@ -34,7 +36,7 @@ use wasmtime::{ use crate::{ query::store::LiveQueryStoreHandle, smartcontracts::{wasm::state::ValidateQueryOperation, Execute}, - wsv::WorldStateView, + wsv::{StateSnapshot, StateTransaction}, ValidQuery as _, }; @@ -332,6 +334,7 @@ pub mod state { use derive_more::Constructor; + use self::wsv::ConstState; use super::*; /// Construct [`StoreLimits`] from [`Configuration`] @@ -354,15 +357,15 @@ pub mod state { } /// State for most common operations. - /// Generic over borrowed [`WorldStateView`] type and specific executable state. + /// Generic over chain state type and specific executable state. 
pub struct CommonState { pub(super) authority: AccountId, pub(super) store_limits: StoreLimits, /// Span inside of which all logs are recorded for this smart contract pub(super) log_span: Span, pub(super) executed_queries: HashSet, - /// Borrowed [`WorldStateView`] kind - pub(super) wsv: W, + /// State kind + pub(super) state: W, /// Concrete state for specific executable pub(super) specific_state: S, } @@ -381,7 +384,7 @@ pub mod state { store_limits: store_limits_from_config(&config), log_span, executed_queries: HashSet::new(), - wsv, + state: wsv, specific_state, } } @@ -407,33 +410,46 @@ pub mod state { } pub mod wsv { - //! Strongly typed kinds of borrowed [`WorldStateView`] + //! Strongly typed kinds of chain state + + use std::borrow::Borrow; use super::*; - /// Const reference to [`WorldStateView`]. - pub struct WithConst<'wrld>(pub(in super::super) &'wrld WorldStateView); + /// Read-only access to chain state. + pub struct WithConst<'wrld, 'state>(pub(in super::super) &'wrld StateSnapshot<'state>); - /// Mutable reference to [`WorldStateView`]. - pub struct WithMut<'wrld>(pub(in super::super) &'wrld mut WorldStateView); + /// Mutable access to chain state. + pub struct WithMut<'wrld, 'block, 'state>( + pub(in super::super) &'wrld mut StateTransaction<'block, 'state>, + ); - /// Trait to get immutable [`WorldStateView`] + /// Trait to get immutable [`StateSnapshot`] /// - /// Exists to write generic code for [`WithWsv`] and [`WithMutWsv`. - pub trait Wsv { - /// Get immutable [`WorldStateView`] - fn wsv(&self) -> &WorldStateView; + /// Exists to write generic code for [`WithMut`] and [`WithConst`]. + pub trait ConstState { + /// Type which can be borrowed into `[WorldStateSnapshot]` + type Wsv<'wrld, 'state: 'wrld>: Borrow> + where + Self: 'state + 'wrld; + + /// Get immutable chain state. 
+ fn wsv(&self) -> Self::Wsv<'_, '_>; } - impl Wsv for WithConst<'_> { - fn wsv(&self) -> &WorldStateView { + impl ConstState for WithConst<'_, '_> { + type Wsv<'wrld, 'state: 'wrld> = &'wrld StateSnapshot<'state> where Self: 'state; + + fn wsv(&self) -> &StateSnapshot<'_> { self.0 } } - impl Wsv for WithMut<'_> { - fn wsv(&self) -> &WorldStateView { - self.0 + impl ConstState for WithMut<'_, '_, '_> { + type Wsv<'wrld, 'state: 'wrld> = StateSnapshot<'state> where Self: 'state; + + fn wsv(&self) -> StateSnapshot<'_> { + self.0.to_snapshot() } } } @@ -490,30 +506,32 @@ pub mod state { } /// State for smart contract execution - pub type SmartContract<'wrld> = CommonState, specific::SmartContract>; + pub type SmartContract<'wrld, 'block, 'state> = + CommonState, specific::SmartContract>; /// State for trigger execution - pub type Trigger<'wrld> = CommonState, specific::Trigger>; + pub type Trigger<'wrld, 'block, 'state> = + CommonState, specific::Trigger>; - impl ValidateQueryOperation for SmartContract<'_> { + impl ValidateQueryOperation for SmartContract<'_, '_, '_> { fn validate_query( &self, authority: &AccountId, query: QueryBox, ) -> Result<(), ValidationFail> { - let wsv: &WorldStateView = self.wsv.0; - wsv.executor().validate_query(wsv, authority, query) + let wsv = self.state.wsv(); + wsv.world.executor.validate_query(&wsv, authority, query) } } - impl ValidateQueryOperation for Trigger<'_> { + impl ValidateQueryOperation for Trigger<'_, '_, '_> { fn validate_query( &self, authority: &AccountId, query: QueryBox, ) -> Result<(), ValidationFail> { - let wsv: &WorldStateView = self.wsv.0; - wsv.executor().validate_query(wsv, authority, query) + let wsv = self.state.wsv(); + wsv.world.executor.validate_query(&wsv, authority, query) } } @@ -523,23 +541,28 @@ pub mod state { use super::*; /// State for executing `validate_transaction()` entrypoint - pub type ValidateTransaction<'wrld> = - CommonState, specific::executor::ValidateTransaction>; + pub type 
ValidateTransaction<'wrld, 'block, 'state> = CommonState< + wsv::WithMut<'wrld, 'block, 'state>, + specific::executor::ValidateTransaction, + >; /// State for executing `validate_query()` entrypoint - pub type ValidateQuery<'wrld> = - CommonState, specific::executor::ValidateQuery>; + pub type ValidateQuery<'wrld, 'state> = + CommonState, specific::executor::ValidateQuery>; /// State for executing `validate_instruction()` entrypoint - pub type ValidateInstruction<'wrld> = - CommonState, specific::executor::ValidateInstruction>; + pub type ValidateInstruction<'wrld, 'block, 'state> = CommonState< + wsv::WithMut<'wrld, 'block, 'state>, + specific::executor::ValidateInstruction, + >; /// State for executing `migrate()` entrypoint - pub type Migrate<'wrld> = CommonState, specific::executor::Migrate>; + pub type Migrate<'wrld, 'block, 'state> = + CommonState, specific::executor::Migrate>; macro_rules! impl_blank_validate_operations { - ($($t:ident),+ $(,)?) => { $( - impl ValidateQueryOperation for $t <'_> { + ($($t:ty),+ $(,)?) 
=> { $( + impl ValidateQueryOperation for $t { fn validate_query( &self, _authority: &AccountId, @@ -552,10 +575,10 @@ pub mod state { } impl_blank_validate_operations!( - ValidateTransaction, - ValidateInstruction, - ValidateQuery, - Migrate, + ValidateTransaction<'_, '_, '_>, + ValidateInstruction<'_, '_, '_>, + ValidateQuery<'_, '_>, + Migrate<'_, '_, '_>, ); } } @@ -720,7 +743,7 @@ impl Runtime> { } } -impl Runtime> { +impl Runtime> { fn execute_executor_validate_internal( &self, module: &wasmtime::Module, @@ -748,14 +771,14 @@ impl Runtime> { let mut state = store.into_data(); let executed_queries = state.take_executed_queries(); - forget_all_executed_queries(state.wsv.wsv().query_handle(), executed_queries)?; + forget_all_executed_queries(state.state.wsv().borrow().query_handle, executed_queries)?; Ok(validation_res) } } impl Runtime> where - W: state::wsv::Wsv, + W: state::wsv::ConstState, state::CommonState: state::ValidateQueryOperation, { fn default_execute_query( @@ -772,11 +795,12 @@ where fetch_size, }) => { let batched = { - let wsv = &state.wsv.wsv(); + let wsv = state.state.wsv(); + let wsv = wsv.borrow(); state.validate_query(&state.authority, query.clone())?; let output = query.execute(wsv)?; - wsv.query_handle() + wsv.query_handle .handle_query_output(output, &sorting, pagination, fetch_size) }?; match &batched { @@ -794,17 +818,24 @@ where if let Some(query_id) = &cursor.query_id { state.executed_queries.insert(query_id.clone()); } - state.wsv.wsv().query_handle().handle_query_cursor(cursor) + state + .state + .wsv() + .borrow() + .query_handle + .handle_query_cursor(cursor) } } .map_err(Into::into) } } -impl<'wrld, S> Runtime, S>> { +impl<'wrld, 'state, 'block, S> + Runtime, S>> +{ fn default_execute_instruction( instruction: InstructionExpr, - state: &mut state::CommonState, S>, + state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { debug!(%instruction, "Executing"); @@ -813,14 +844,14 @@ impl<'wrld, S> Runtime, S>> { // is 
validated and then it's executed. Here it's validating in both steps. // Add a flag indicating whether smart contract is being validated or executed let authority = state.authority.clone(); - let wsv: &mut WorldStateView = state.wsv.0; - wsv.executor() - .clone() // Cloning executor is a cheap operation - .validate_instruction(wsv, &authority, instruction) + state.state.0.world + .executor + .clone() // Cloning executor is a cheap operation + .validate_instruction(state.state.0, &authority, instruction) } } -impl<'wrld> Runtime> { +impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime> { /// Executes the given wasm smartcontract /// /// # Errors @@ -830,7 +861,7 @@ impl<'wrld> Runtime> { /// - if the execution of the smartcontract fails pub fn execute( &mut self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, authority: AccountId, bytes: impl AsRef<[u8]>, ) -> Result<()> { @@ -855,7 +886,7 @@ impl<'wrld> Runtime> { /// - if execution of the smartcontract fails (check [`Self::execute`]) pub fn validate( &mut self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, authority: AccountId, bytes: impl AsRef<[u8]>, max_instruction_count: u64, @@ -875,7 +906,7 @@ impl<'wrld> Runtime> { fn execute_smart_contract_with_state( &mut self, bytes: impl AsRef<[u8]>, - state: state::SmartContract<'wrld>, + state: state::SmartContract<'wrld, 'block, 'state>, ) -> Result<()> { let mut store = self.create_store(state); let smart_contract = self.create_smart_contract(&mut store, bytes)?; @@ -889,7 +920,7 @@ impl<'wrld> Runtime> { .map_err(ExportFnCallError::from)?; let mut state = store.into_data(); let executed_queries = state.take_executed_queries(); - forget_all_executed_queries(state.wsv.0.query_handle(), executed_queries) + forget_all_executed_queries(state.state.0.query_handle, executed_queries) } #[codec::wrap] @@ -900,13 +931,14 @@ impl<'wrld> Runtime> { } } -impl<'wrld> import::traits::ExecuteOperations> - 
for Runtime> +impl<'wrld, 'block, 'state> + import::traits::ExecuteOperations> + for Runtime> { #[codec::wrap] fn execute_query( query_request: SmartContractQueryRequest, - state: &mut state::SmartContract<'wrld>, + state: &mut state::SmartContract<'wrld, 'block, 'state>, ) -> Result, ValidationFail> { Self::default_execute_query(query_request, state) } @@ -914,7 +946,7 @@ impl<'wrld> import::traits::ExecuteOperations> #[codec::wrap] fn execute_instruction( instruction: InstructionExpr, - state: &mut state::SmartContract<'wrld>, + state: &mut state::SmartContract<'wrld, 'block, 'state>, ) -> Result<(), ValidationFail> { if let Some(limits_executor) = state.specific_state.limits_executor.as_mut() { limits_executor.check_instruction_limits()?; @@ -924,7 +956,7 @@ impl<'wrld> import::traits::ExecuteOperations> } } -impl<'wrld> Runtime> { +impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime> { /// Executes the given wasm trigger module /// /// # Errors @@ -933,7 +965,7 @@ impl<'wrld> Runtime> { /// - if the execution of the smartcontract fails pub fn execute_trigger_module( &mut self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, id: &TriggerId, authority: AccountId, module: &wasmtime::Module, @@ -960,7 +992,7 @@ impl<'wrld> Runtime> { let mut state = store.into_data(); let executed_queries = state.take_executed_queries(); - forget_all_executed_queries(state.wsv.0.query_handle(), executed_queries) + forget_all_executed_queries(state.state.0.query_handle, executed_queries) } #[codec::wrap] @@ -972,13 +1004,13 @@ impl<'wrld> Runtime> { } } -impl<'wrld> import::traits::ExecuteOperations> - for Runtime> +impl<'wrld, 'block, 'state> import::traits::ExecuteOperations> + for Runtime> { #[codec::wrap] fn execute_query( query_request: SmartContractQueryRequest, - state: &mut state::Trigger<'wrld>, + state: &mut state::Trigger<'wrld, 'block, 'state>, ) -> Result, ValidationFail> { Self::default_execute_query(query_request, state) } @@ 
-986,7 +1018,7 @@ impl<'wrld> import::traits::ExecuteOperations> #[codec::wrap] fn execute_instruction( instruction: InstructionExpr, - state: &mut state::Trigger<'wrld>, + state: &mut state::Trigger<'wrld, 'block, 'state>, ) -> Result<(), ValidationFail> { Self::default_execute_instruction(instruction, state) } @@ -995,19 +1027,24 @@ impl<'wrld> import::traits::ExecuteOperations> /// Marker trait to auto-implement [`import_traits::ExecuteOperations`] for a concrete /// *Executor* [`Runtime`]. /// -/// *Mut* means that [`WorldStateView`] will be mutated. +/// *Mut* means that chain state can be mutated. trait ExecuteOperationsAsExecutorMut {} -impl<'wrld, R, S> - import::traits::ExecuteOperations, S>> for R +impl<'wrld, 'block, 'state, R, S> + import::traits::ExecuteOperations< + state::CommonState, S>, + > for R where - R: ExecuteOperationsAsExecutorMut, S>>, - state::CommonState, S>: state::ValidateQueryOperation, + R: ExecuteOperationsAsExecutorMut< + state::CommonState, S>, + >, + state::CommonState, S>: + state::ValidateQueryOperation, { #[codec::wrap] fn execute_query( query_request: SmartContractQueryRequest, - state: &mut state::CommonState, S>, + state: &mut state::CommonState, S>, ) -> Result, ValidationFail> { debug!(%query_request, "Executing as executor"); @@ -1017,12 +1054,12 @@ where #[codec::wrap] fn execute_instruction( instruction: InstructionExpr, - state: &mut state::CommonState, S>, + state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { debug!(%instruction, "Executing as executor"); instruction - .execute(&state.authority.clone(), state.wsv.0) + .execute(&state.authority.clone(), state.state.0) .map_err(Into::into) } } @@ -1049,7 +1086,7 @@ where } } -impl<'wrld> Runtime> { +impl<'wrld, 'block, 'state> Runtime> { /// Execute `validate_transaction()` entrypoint of the given module of runtime executor /// /// # Errors @@ -1060,7 +1097,7 @@ impl<'wrld> Runtime> { /// - if unable to decode [`executor::Result`] pub fn 
execute_executor_validate_transaction( &self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, authority: &AccountId, module: &wasmtime::Module, transaction: SignedTransaction, @@ -1081,54 +1118,55 @@ impl<'wrld> Runtime> { } } -impl<'wrld> ExecuteOperationsAsExecutorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } -impl<'wrld> import::traits::GetExecutorPayloads> - for Runtime> +impl<'wrld, 'block, 'state> + import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] fn get_migrate_payload( - _state: &state::executor::ValidateTransaction<'wrld>, + _state: &state::executor::ValidateTransaction<'wrld, 'block, 'state>, ) -> payloads::Migrate { panic!("Executor `validate_transaction()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - state: &state::executor::ValidateTransaction<'wrld>, + state: &state::executor::ValidateTransaction<'wrld, 'block, 'state>, ) -> Validate { Validate { authority: state.authority.clone(), - block_height: state.wsv.0.height(), + block_height: state.state.0.height(), to_validate: state.specific_state.to_validate.clone(), } } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::executor::ValidateTransaction<'wrld>, + _state: &state::executor::ValidateTransaction<'wrld, 'block, 'state>, ) -> Validate { panic!("Executor `validate_transaction()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] fn get_validate_query_payload( - _state: &state::executor::ValidateTransaction<'wrld>, + _state: &state::executor::ValidateTransaction<'wrld, 'block, 'state>, ) -> Validate { panic!("Executor `validate_transaction()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: &'static 
str = "validate_transaction"; } -impl<'wrld> Runtime> { +impl<'wrld, 'block, 'state> Runtime> { /// Execute `validate_instruction()` entrypoint of the given module of runtime executor /// /// # Errors @@ -1139,7 +1177,7 @@ impl<'wrld> Runtime> { /// - if unable to decode [`executor::Result`] pub fn execute_executor_validate_instruction( &self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, authority: &AccountId, module: &wasmtime::Module, instruction: InstructionExpr, @@ -1160,54 +1198,55 @@ impl<'wrld> Runtime> { } } -impl<'wrld> ExecuteOperationsAsExecutorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } -impl<'wrld> import::traits::GetExecutorPayloads> - for Runtime> +impl<'wrld, 'block, 'state> + import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] fn get_migrate_payload( - _state: &state::executor::ValidateInstruction<'wrld>, + _state: &state::executor::ValidateInstruction<'wrld, 'block, 'state>, ) -> payloads::Migrate { panic!("Executor `validate_instruction()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - _state: &state::executor::ValidateInstruction<'wrld>, + _state: &state::executor::ValidateInstruction<'wrld, 'block, 'state>, ) -> Validate { panic!("Executor `validate_instruction()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - state: &state::executor::ValidateInstruction<'wrld>, + state: &state::executor::ValidateInstruction<'wrld, 'block, 'state>, ) -> Validate { Validate { authority: state.authority.clone(), - block_height: state.wsv.0.height(), + block_height: state.state.0.height(), to_validate: state.specific_state.to_validate.clone(), } } #[codec::wrap] fn get_validate_query_payload( - _state: &state::executor::ValidateInstruction<'wrld>, + _state: &state::executor::ValidateInstruction<'wrld, 'block, 
'state>, ) -> Validate { panic!("Executor `validate_instruction()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: &'static str = "validate_instruction"; } -impl<'wrld> Runtime> { +impl<'wrld, 'state> Runtime> { /// Execute `validate_query()` entrypoint of the given module of runtime executor /// /// # Errors @@ -1218,7 +1257,7 @@ impl<'wrld> Runtime> { /// - if unable to decode [`executor::Result`] pub fn execute_executor_validate_query( &self, - wsv: &'wrld WorldStateView, + wsv: &'wrld StateSnapshot<'state>, authority: &AccountId, module: &wasmtime::Module, query: QueryBox, @@ -1239,13 +1278,13 @@ impl<'wrld> Runtime> { } } -impl<'wrld> import::traits::ExecuteOperations> - for Runtime> +impl<'wrld, 'state> import::traits::ExecuteOperations> + for Runtime> { #[codec::wrap] fn execute_query( query_request: SmartContractQueryRequest, - state: &mut state::executor::ValidateQuery<'wrld>, + state: &mut state::executor::ValidateQuery<'wrld, 'state>, ) -> Result, ValidationFail> { debug!(%query_request, "Executing as executor"); @@ -1255,53 +1294,56 @@ impl<'wrld> import::traits::ExecuteOperations, + _state: &mut state::executor::ValidateQuery<'wrld, 'state>, ) -> Result<(), ValidationFail> { panic!("Executor `validate_query()` entrypoint should not execute instructions") } } -impl<'wrld> import::traits::GetExecutorPayloads> - for Runtime> +impl<'wrld, 'state> + import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] - fn get_migrate_payload(_state: &state::executor::ValidateQuery<'wrld>) -> payloads::Migrate { + fn get_migrate_payload( + _state: &state::executor::ValidateQuery<'wrld, 'state>, + ) -> payloads::Migrate { panic!("Executor `validate_query()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - _state: 
&state::executor::ValidateQuery<'wrld>, + _state: &state::executor::ValidateQuery<'wrld, 'state>, ) -> Validate { panic!("Executor `validate_query()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::executor::ValidateQuery<'wrld>, + _state: &state::executor::ValidateQuery<'wrld, 'state>, ) -> Validate { panic!("Executor `validate_query()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] fn get_validate_query_payload( - state: &state::executor::ValidateQuery<'wrld>, + state: &state::executor::ValidateQuery<'wrld, 'state>, ) -> Validate { Validate { authority: state.authority.clone(), - block_height: state.wsv.0.height(), + block_height: state.state.0.height(), to_validate: state.specific_state.to_validate.clone(), } } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld, 'state> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: &'static str = "validate_query"; } -impl<'wrld> Runtime> { +impl<'wrld, 'block, 'state> Runtime> { /// Execute `migrate()` entrypoint of *Executor* /// /// # Errors @@ -1312,7 +1354,7 @@ impl<'wrld> Runtime> { /// - if failed to decode [`MigrationResult`] pub fn execute_executor_migration( &self, - wsv: &'wrld mut WorldStateView, + wsv: &'wrld mut StateTransaction<'block, 'state>, authority: &AccountId, module: &wasmtime::Module, ) -> Result { @@ -1344,8 +1386,8 @@ impl<'wrld> Runtime> { } } -impl<'wrld> ExecuteOperationsAsExecutorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } @@ -1358,47 +1400,53 @@ impl<'wrld> ExecuteOperationsAsExecutorMut> /// /// Panics with error message if called, because it should never be called from /// `migrate()` entrypoint. 
-impl<'wrld> import::traits::GetExecutorPayloads> - for Runtime> +impl<'wrld, 'block, 'state> + import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] - fn get_migrate_payload(state: &state::executor::Migrate<'wrld>) -> payloads::Migrate { + fn get_migrate_payload( + state: &state::executor::Migrate<'wrld, 'block, 'state>, + ) -> payloads::Migrate { payloads::Migrate { - block_height: state.wsv.0.height(), + block_height: state.state.0.height(), } } #[codec::wrap] fn get_validate_transaction_payload( - _state: &state::executor::Migrate<'wrld>, + _state: &state::executor::Migrate<'wrld, 'block, 'state>, ) -> Validate { panic!("Executor `migrate()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::executor::Migrate<'wrld>, + _state: &state::executor::Migrate<'wrld, 'block, 'state>, ) -> Validate { panic!("Executor `migrate()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] - fn get_validate_query_payload(_state: &state::executor::Migrate<'wrld>) -> Validate { + fn get_validate_query_payload( + _state: &state::executor::Migrate<'wrld, 'block, 'state>, + ) -> Validate { panic!("Executor `migrate()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> import::traits::SetPermissionTokenSchema> - for Runtime> +impl<'wrld, 'block, 'state> + import::traits::SetPermissionTokenSchema> + for Runtime> { #[codec::wrap] fn set_permission_token_schema( schema: PermissionTokenSchema, - state: &mut state::executor::Migrate<'wrld>, + state: &mut state::executor::Migrate<'wrld, 'block, 'state>, ) { debug!(%schema, "Setting permission token schema"); - state.wsv.0.set_permission_token_schema(schema) + state.state.0.world.set_permission_token_schema(schema) } } @@ -1460,132 +1508,142 @@ impl RuntimeBuilder { macro_rules! 
create_imports { ( $linker:ident, - $(export::$name:ident => $fn_path:path),* $(,)? + $ty:ty, + $(export::$name:ident => $fn:expr),* $(,)? ) => { $linker.func_wrap( WASM_MODULE, export::LOG, - Runtime::log, + |caller: ::wasmtime::Caller<$ty>, offset, len| Runtime::log(caller, offset, len), ) .and_then(|l| { l.func_wrap( WASM_MODULE, export::DBG, - Runtime::dbg, + |caller: ::wasmtime::Caller<$ty>, offset, len| Runtime::dbg(caller, offset, len), ) }) $(.and_then(|l| { l.func_wrap( WASM_MODULE, export::$name, - $fn_path, + $fn, ) }))* .map_err(Error::Initialization) }; } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'block, 'state> RuntimeBuilder> { /// Builds the [`Runtime`] for *Smart Contract* execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, - export::GET_SMART_CONTRACT_PAYLOAD => Runtime::get_smart_contract_payload, + create_imports!(linker, state::SmartContract<'wrld, 'block, 'state>, + export::EXECUTE_ISI => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_instruction(caller, offset, len), + export::EXECUTE_QUERY => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_query(caller, offset, len), + export::GET_SMART_CONTRACT_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_smart_contract_payload(caller), )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'block, 'state> RuntimeBuilder> { /// Builds the [`Runtime`] for *Trigger* execution /// /// # Errors /// /// Fails if failed to create default linker. 
- pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, - export::GET_TRIGGER_PAYLOAD => Runtime::get_trigger_payload, + create_imports!(linker, state::Trigger<'wrld, 'block, 'state>, + export::EXECUTE_ISI => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_instruction(caller, offset, len), + export::EXECUTE_QUERY => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_query(caller, offset, len), + export::GET_TRIGGER_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_trigger_payload(caller), )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'block, 'state> + RuntimeBuilder> +{ /// Builds the [`Runtime`] for *Executor* `validate_transaction()` execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build( + self, + ) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, - export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + create_imports!(linker, state::executor::ValidateTransaction<'wrld, 'block, 'state>, + export::EXECUTE_ISI => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_instruction(caller, offset, len), + export::EXECUTE_QUERY => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_query(caller, offset, len), + 
export::GET_MIGRATE_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_migrate_payload(caller), + export::GET_VALIDATE_TRANSACTION_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_transaction_payload(caller), + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_instruction_payload(caller), + export::GET_VALIDATE_QUERY_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_query_payload(caller), + export::SET_PERMISSION_TOKEN_SCHEMA => |caller: ::wasmtime::Caller>, offset, len| Runtime::set_permission_token_schema(caller, offset, len), )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'block, 'state> + RuntimeBuilder> +{ /// Builds the [`Runtime`] for *Executor* `validate_instruction()` execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build( + self, + ) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, - export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + create_imports!(linker, state::executor::ValidateInstruction<'wrld, 'block, 'state>, + export::EXECUTE_ISI => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_instruction(caller, offset, len), + export::EXECUTE_QUERY => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_query(caller, offset, len), + export::GET_MIGRATE_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_migrate_payload(caller), + export::GET_VALIDATE_TRANSACTION_PAYLOAD 
=> |caller: ::wasmtime::Caller>| Runtime::get_validate_transaction_payload(caller), + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_instruction_payload(caller), + export::GET_VALIDATE_QUERY_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_query_payload(caller), + export::SET_PERMISSION_TOKEN_SCHEMA => |caller: ::wasmtime::Caller>, offset, len| Runtime::set_permission_token_schema(caller, offset, len), )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'state> RuntimeBuilder> { /// Builds the [`Runtime`] for *Executor* `validate_query()` execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, + // NOTE: doesn't need closure here because `ValidateQuery` is covariant over 'wrld so 'static can be used and substituted with appropriate lifetime + create_imports!(linker, state::executor::ValidateQuery<'_, '_>, + export::EXECUTE_ISI => Runtime::execute_instruction, + export::EXECUTE_QUERY => Runtime::execute_query, export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, @@ -1597,24 +1655,24 @@ impl<'wrld> RuntimeBuilder> { } } -impl<'wrld> RuntimeBuilder> { +impl<'wrld, 'block, 'state> RuntimeBuilder> { /// Builds the [`Runtime`] to execute `permission_tokens()` entrypoint of *Executor* /// /// # Errors /// /// Fails if failed to create default linker. 
- pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); - create_imports!(linker, - export::EXECUTE_ISI => Runtime::>::execute_instruction, - export::EXECUTE_QUERY => Runtime::>::execute_query, - export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + create_imports!(linker, state::executor::Migrate<'wrld, 'block, 'state>, + export::EXECUTE_ISI => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_instruction(caller, offset, len), + export::EXECUTE_QUERY => |caller: ::wasmtime::Caller>, offset, len| Runtime::execute_query(caller, offset, len), + export::GET_MIGRATE_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_migrate_payload(caller), + export::GET_VALIDATE_TRANSACTION_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_transaction_payload(caller), + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_instruction_payload(caller), + export::GET_VALIDATE_QUERY_PAYLOAD => |caller: ::wasmtime::Caller>| Runtime::get_validate_query_payload(caller), + export::SET_PERMISSION_TOKEN_SCHEMA => |caller: ::wasmtime::Caller>, offset, len| Runtime::set_permission_token_schema(caller, offset, len), )?; Ok(linker) }) @@ -1649,8 +1707,8 @@ mod tests { use super::*; use crate::{ - kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, PeersIds, - World, + kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, + wsv::State, PeersIds, World, }; fn world_with_test_account(authority: &AccountId) -> World { @@ -1712,7 +1770,7 @@ mod tests { let authority = 
AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); @@ -1743,7 +1801,7 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; runtime - .execute(&mut wsv, authority, wat) + .execute(&mut wsv.block(false).transaction(), authority, wat) .expect("Execution failed"); Ok(()) @@ -1754,7 +1812,7 @@ mod tests { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let query_hex = encode_hex(SmartContractQueryRequest::query( QueryBox::from(FindAccountById::new(authority.clone())), Sorting::default(), @@ -1786,7 +1844,7 @@ mod tests { let mut runtime = RuntimeBuilder::::new().build()?; runtime - .execute(&mut wsv, authority, wat) + .execute(&mut wsv.block(false).transaction(), authority, wat) .expect("Execution failed"); Ok(()) @@ -1798,7 +1856,7 @@ mod tests { let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); @@ -1829,7 +1887,7 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut wsv, authority, wat, 1); + let res = runtime.validate(&mut 
wsv.block(false).transaction(), authority, wat, 1); if let Error::ExportFnCall(ExportFnCallError::Other(report)) = res.expect_err("Execution should fail") @@ -1847,7 +1905,7 @@ mod tests { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); @@ -1878,7 +1936,7 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut wsv, authority, wat, 1); + let res = runtime.validate(&mut wsv.block(false).transaction(), authority, wat, 1); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") @@ -1896,7 +1954,7 @@ mod tests { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let query_hex = encode_hex(QueryBox::from(FindAccountById::new(authority.clone()))); let wat = format!( @@ -1922,7 +1980,7 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut wsv, authority, wat, 1); + let res = runtime.validate(&mut wsv.block(false).transaction(), authority, wat, 1); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") @@ -1938,7 +1996,7 @@ mod tests { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); 
- let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let wsv = State::new(world_with_test_account(&authority), kura, query_handle); let query_hex = encode_hex(QueryBox::from(FindAccountById::new(authority.clone()))); let wat = format!( @@ -1964,7 +2022,7 @@ mod tests { let mut runtime = RuntimeBuilder::::new().build()?; let err = runtime - .execute(&mut wsv, authority, wat) + .execute(&mut wsv.block(false).transaction(), authority, wat) .expect_err("Execution should fail"); assert!(matches!( diff --git a/core/src/snapshot.rs b/core/src/snapshot.rs index 8ccbd0b318e..e3712a5e2fc 100644 --- a/core/src/snapshot.rs +++ b/core/src/snapshot.rs @@ -1,4 +1,4 @@ -//! This module contains [`WorldStateView`] snapshot actor service. +//! This module contains [`State`] snapshot actor service. use std::{ io::Read, path::{Path, PathBuf}, @@ -16,13 +16,12 @@ use tokio::sync::mpsc; use crate::{ kura::{BlockCount, Kura}, query::store::LiveQueryStoreHandle, - sumeragi::SumeragiHandle, - wsv::{KuraSeed, WorldStateView}, + wsv::{deserialize::KuraSeed, State}, }; -/// Name of the [`WorldStateView`] snapshot file. +/// Name of the [`State`] snapshot file. const SNAPSHOT_FILE_NAME: &str = "snapshot.data"; -/// Name of the temporary [`WorldStateView`] snapshot file. +/// Name of the temporary [`State`] snapshot file. const SNAPSHOT_TMP_FILE_NAME: &str = "snapshot.tmp"; /// Errors produced by [`SnapshotMaker`] actor. @@ -35,17 +34,17 @@ pub struct SnapshotMakerHandle { _message_sender: mpsc::Sender<()>, } -/// Actor responsible for [`WorldStateView`] snapshot reading and writing. +/// Actor responsible for [`State`] snapshot reading and writing. 
pub struct SnapshotMaker { - sumeragi: SumeragiHandle, + wsv: Arc, /// Frequency at which snapshot is made snapshot_create_every: Duration, /// Path to the directory where snapshots are stored snapshot_dir: String, /// Flag to enable/disable snapshot creation snapshot_creation_enabled: bool, - /// Flag to signal that new wsv is available for taking snapshot - new_wsv_available: bool, + /// Hash of the latest block stored in the wsv + latest_block_hash: Option>, } impl SnapshotMaker { @@ -70,18 +69,13 @@ impl SnapshotMaker { loop { tokio::select! { - _ = snapshot_create_every.tick(), if self.new_wsv_available => { + _ = snapshot_create_every.tick() => { // Offload snapshot creation into blocking thread self.create_snapshot().await; }, - _ = self.sumeragi.finalized_wsv_updated() => { - self.sumeragi.apply_finalized_wsv(|finalized_wsv| self.new_wsv_available = finalized_wsv.height() > 0); - } _ = message_receiver.recv() => { info!("All handler to SnapshotMaker are dropped. Saving latest snapshot and shutting down..."); - if self.new_wsv_available { - self.create_snapshot().await; - } + self.create_snapshot().await; break; } } @@ -91,25 +85,32 @@ impl SnapshotMaker { /// Invoke snapshot creation task async fn create_snapshot(&mut self) { - let sumeragi = self.sumeragi.clone(); let path_to_snapshot = self.snapshot_dir.clone(); - let handle = tokio::task::spawn_blocking(move || -> Result { - sumeragi.apply_finalized_wsv(|wsv| { - Self::try_write_snapshot(wsv, &path_to_snapshot)?; - Ok(wsv.height()) - }) - }); + let latest_block_hash; + let at_height; + { + let wsv = self.wsv.view(); + latest_block_hash = wsv.latest_block_hash(); + at_height = wsv.height(); + } - match handle.await { - Ok(Ok(at_height)) => { - iroha_logger::info!(at_height, "Snapshot for wsv was created successfully."); - self.new_wsv_available = false; - } - Ok(Err(error)) => { - iroha_logger::error!(%error, "Failed to create snapshot for wsv."); - } - Err(panic) => { - iroha_logger::error!(%panic, "Task
panicked during creation of wsv snapshot."); + if latest_block_hash != self.latest_block_hash { + let wsv = self.wsv.clone(); + let handle = tokio::task::spawn_blocking(move || -> Result<()> { + Self::try_write_snapshot(&wsv, &path_to_snapshot) + }); + + match handle.await { + Ok(Ok(())) => { + iroha_logger::info!(at_height, "Snapshot for wsv was created successfully."); + self.latest_block_hash = latest_block_hash; + } + Ok(Err(error)) => { + iroha_logger::error!(%error, "Failed to create snapshot for wsv."); + } + Err(panic) => { + iroha_logger::error!(%panic, "Task panicked during creation of wsv snapshot."); + } } } } @@ -120,7 +121,7 @@ impl SnapshotMaker { /// # Errors /// - IO errors /// - Serialization errors - fn try_write_snapshot(wsv: &WorldStateView, snapshot_dir: impl AsRef) -> Result<()> { + fn try_write_snapshot(wsv: &State, snapshot_dir: impl AsRef) -> Result<()> { let path_to_file = snapshot_dir.as_ref().join(SNAPSHOT_FILE_NAME); let path_to_tmp_file = snapshot_dir.as_ref().join(SNAPSHOT_TMP_FILE_NAME); let file = std::fs::OpenOptions::new() @@ -137,18 +138,19 @@ impl SnapshotMaker { } /// Create [`Self`] from [`Configuration`] - pub fn from_configuration(config: &Configuration, sumeragi: SumeragiHandle) -> Self { + pub fn from_configuration(config: &Configuration, wsv: Arc) -> Self { + let latest_block_hash = wsv.view().latest_block_hash(); Self { - sumeragi, + wsv, snapshot_create_every: Duration::from_millis(config.create_every_ms), snapshot_dir: config.dir_path.clone(), snapshot_creation_enabled: config.creation_enabled, - new_wsv_available: false, + latest_block_hash, } } } -/// Try deserialize [`WorldStateView`] from snapshot file +/// Try deserialize [`State`] from snapshot file /// /// # Errors /// - IO errors @@ -158,7 +160,7 @@ pub fn try_read_snapshot( kura: &Arc, query_handle: LiveQueryStoreHandle, BlockCount(block_count): BlockCount, -) -> Result { +) -> Result { let mut bytes = Vec::new(); let path = 
snapshot_dir.as_ref().join(SNAPSHOT_FILE_NAME); let mut file = std::fs::OpenOptions::new() @@ -173,7 +175,8 @@ pub fn try_read_snapshot( query_handle, }; let wsv = seed.deserialize(&mut deserializer)?; - let snapshot_height = wsv.block_hashes.len(); + let view = wsv.view(); + let snapshot_height = view.block_hashes.len(); if snapshot_height > block_count { return Err(Error::MismatchedHeight { snapshot_height, @@ -184,7 +187,7 @@ pub fn try_read_snapshot( let kura_block_hash = kura .get_block_hash(height as u64) .expect("Kura has height at least as large as wsv_height"); - let snapshot_block_hash = wsv.block_hashes[height - 1]; + let snapshot_block_hash = view.block_hashes[height - 1]; if kura_block_hash != snapshot_block_hash { return Err(Error::MismatchedHash { height, @@ -201,7 +204,7 @@ pub fn try_read_snapshot( pub enum Error { /// Failed reading/writing {1:?} from disk IO(#[source] std::io::Error, PathBuf), - /// Error (de)serializing [`WorldStateView`] snapshot + /// Error (de)serializing [`State`] snapshot Serialization(#[from] serde_json::Error), /// Snapshot is in a non-consistent state. Snapshot has greater height ({snapshot_height}) than kura block store ({kura_height}) MismatchedHeight { diff --git a/core/src/sumeragi/main_loop.rs b/core/src/sumeragi/main_loop.rs index 441c0946b2d..a3060b8872c 100644 --- a/core/src/sumeragi/main_loop.rs +++ b/core/src/sumeragi/main_loop.rs @@ -22,10 +22,6 @@ pub struct Sumeragi { pub peer_id: PeerId, /// An actor that sends events pub events_sender: EventsSender, - /// The world state view instance that is used in public contexts - pub public_wsv_sender: watch::Sender, - /// The finalized world state view instance that is used in public contexts - pub public_finalized_wsv_sender: watch::Sender, /// Time by which a newly created block should be committed. 
Prevents malicious nodes /// from stalling the network by not participating in consensus pub commit_time: Duration, @@ -47,18 +43,6 @@ pub struct Sumeragi { pub debug_force_soft_fork: bool, /// The current network topology. pub current_topology: Topology, - /// The sumeragi internal [`WorldStateView`]. This will probably - /// morph into a wsv + various patches as we attempt to - /// multithread isi execution. In the future we might also once - /// again merge the internal wsv with the public facing one. But - /// as of now we keep them separate for greater flexibility when - /// optimizing. - pub wsv: WorldStateView, - /// A copy of wsv that is kept one block behind at all times. Because - /// we currently don't support rolling back wsv block application we - /// reset to a copy of the finalized_wsv instead. This is expensive but - /// enables us to handle soft-forks. - pub finalized_wsv: WorldStateView, /// In order to *be fast*, we must minimize communication with /// other subsystems where we can. 
This way the performance of /// sumeragi is more dependent on the code that is internal to the @@ -137,6 +121,7 @@ impl Sumeragi { fn receive_network_packet( &self, + wsv: &StateView<'_>, view_change_proof_chain: &mut ProofChain, control_message_in_a_row_counter: &mut usize, ) -> Option { @@ -174,7 +159,7 @@ impl Sumeragi { packet.view_change_proofs, &self.current_topology.ordered_peers, self.current_topology.max_faults(), - self.wsv.latest_block_hash(), + wsv.latest_block_hash(), ) { trace!(%error, "Failed to add proofs into view change proof chain") } @@ -184,6 +169,7 @@ impl Sumeragi { fn init_listen_for_genesis( &mut self, + wsv: &State, shutdown_receiver: &mut tokio::sync::oneshot::Receiver<()>, ) -> Result<(), EarlyReturn> { trace!("Listen for genesis"); @@ -198,8 +184,6 @@ impl Sumeragi { match self.message_receiver.try_recv() { Ok(packet) => { if let Some(message) = packet.message { - let mut new_wsv = self.wsv.clone(); - let block = match message { Message::BlockCreated(BlockCreated { block }) | Message::BlockSyncUpdate(BlockSyncUpdate { block }) => block, @@ -209,6 +193,7 @@ impl Sumeragi { } }; + let mut new_wsv = wsv.block(false); let block = match ValidBlock::validate(block, &self.current_topology, &mut new_wsv) .and_then(|block| { @@ -233,11 +218,14 @@ impl Sumeragi { } } - fn sumeragi_init_commit_genesis(&mut self, genesis_network: GenesisNetwork) { + fn sumeragi_init_commit_genesis(&mut self, genesis_network: GenesisNetwork, wsv: &State) { std::thread::sleep(Duration::from_millis(250)); - assert_eq!(self.wsv.height(), 0); - assert_eq!(self.wsv.latest_block_hash(), None); + { + let wsv_view = wsv.view(); + assert_eq!(wsv_view.height(), 0); + assert_eq!(wsv_view.latest_block_hash(), None); + } let transactions: Vec<_> = genesis_network .transactions @@ -245,7 +233,7 @@ impl Sumeragi { .map(AcceptedTransaction::accept_genesis) .collect(); - let mut new_wsv = self.wsv.clone(); + let mut new_wsv = wsv.block(false); let genesis = 
BlockBuilder::new(transactions, self.current_topology.clone(), vec![]) .chain(0, &mut new_wsv) .sign(self.key_pair.clone()) @@ -279,44 +267,38 @@ impl Sumeragi { self.broadcast_packet(genesis_msg); } - fn commit_block(&mut self, block: CommittedBlock, new_wsv: WorldStateView) { + fn commit_block(&mut self, block: CommittedBlock, new_wsv: StateBlock<'_>) { self.update_state::(block, new_wsv); } - fn replace_top_block(&mut self, block: CommittedBlock, new_wsv: WorldStateView) { + fn replace_top_block(&mut self, block: CommittedBlock, new_wsv: StateBlock<'_>) { self.update_state::(block, new_wsv); } fn update_state( &mut self, block: CommittedBlock, - mut new_wsv: WorldStateView, + mut new_wsv: StateBlock<'_>, ) { info!( addr=%self.peer_id.address, role=%self.current_topology.role(&self.peer_id), - block_height=%self.wsv.height(), + block_height=%new_wsv.height(), block_hash=%block.hash(), "{}", Strategy::LOG_MESSAGE, ); - Strategy::before_update_hook(self); - new_wsv .apply_without_execution(&block) .expect("Failed to apply block on WSV. Bailing."); - self.wsv = new_wsv; - let wsv_events = core::mem::take(&mut self.wsv.events_buffer); + let wsv_events = core::mem::take(&mut new_wsv.world.events_buffer); self.send_events(wsv_events); - // Parameters are updated before updating public copy of sumeragi - self.update_params(); - let new_topology = Topology::recreate_topology( block.as_ref(), 0, - self.wsv.peers_ids().iter().cloned().collect(), + new_wsv.world.peers_ids().iter().cloned().collect(), ); let events = block.produce_events(); @@ -325,63 +307,57 @@ impl Sumeragi { // Public-facing WSV update should happen after that and be followed by `BlockCommited` event to prevent client access to uncommitted data. 
Strategy::kura_store_block(&self.kura, block); - // Update WSV copy that is public facing - self.public_wsv_sender - .send_modify(|public_wsv| *public_wsv = self.wsv.clone()); - self.public_finalized_wsv_sender - .send_if_modified(|public_finalized_wsv| { - if public_finalized_wsv.height() < self.finalized_wsv.height() { - *public_finalized_wsv = self.finalized_wsv.clone(); - true - } else { - false - } - }); + // Parameters are updated before updating public copy of sumeragi + self.update_params(&new_wsv); + self.cache_transaction(&new_wsv); + self.current_topology = new_topology; + self.connect_peers(&self.current_topology); + // Commit new block making its effect visible for the rest of application + new_wsv.commit(); // NOTE: This sends "Block committed" event, // so it should be done AFTER public facing WSV update self.send_events(events); - self.current_topology = new_topology; - self.connect_peers(&self.current_topology); - - self.cache_transaction(); } - fn update_params(&mut self) { + fn update_params(&mut self, new_wsv: &StateBlock<'_>) { use iroha_data_model::parameter::default::*; - if let Some(block_time) = self.wsv.query_param(BLOCK_TIME) { + if let Some(block_time) = new_wsv.world.query_param(BLOCK_TIME) { self.block_time = Duration::from_millis(block_time); } - if let Some(commit_time) = self.wsv.query_param(COMMIT_TIME_LIMIT) { + if let Some(commit_time) = new_wsv.world.query_param(COMMIT_TIME_LIMIT) { self.commit_time = Duration::from_millis(commit_time); } - if let Some(max_txs_in_block) = self.wsv.query_param::(MAX_TRANSACTIONS_IN_BLOCK) { + if let Some(max_txs_in_block) = new_wsv + .world + .query_param::(MAX_TRANSACTIONS_IN_BLOCK) + { self.max_txs_in_block = max_txs_in_block as usize; } } - fn cache_transaction(&mut self) { + fn cache_transaction(&mut self, wsv_block: &StateBlock<'_>) { self.transaction_cache - .retain(|tx| 
!wsv_block.has_transaction(tx.hash()) && !self.queue.is_expired(tx)); } } fn suggest_view_change( sumeragi: &Sumeragi, + wsv_view: &StateView<'_>, view_change_proof_chain: &mut ProofChain, current_view_change_index: u64, ) { - let suspect_proof = - ProofBuilder::new(sumeragi.wsv.latest_block_hash(), current_view_change_index) - .sign(sumeragi.key_pair.clone()) - .expect("Proof signing failed"); + let suspect_proof = ProofBuilder::new(wsv_view.latest_block_hash(), current_view_change_index) + .sign(sumeragi.key_pair.clone()) + .expect("Proof signing failed"); view_change_proof_chain .insert_proof( &sumeragi.current_topology.ordered_peers, sumeragi.current_topology.max_faults(), - sumeragi.wsv.latest_block_hash(), + wsv_view.latest_block_hash(), suspect_proof, ) .unwrap_or_else(|err| error!("{err}")); @@ -392,21 +368,23 @@ fn suggest_view_change( fn prune_view_change_proofs_and_calculate_current_index( sumeragi: &Sumeragi, + wsv_view: &StateView<'_>, view_change_proof_chain: &mut ProofChain, ) -> u64 { - view_change_proof_chain.prune(sumeragi.wsv.latest_block_hash()); + view_change_proof_chain.prune(wsv_view.latest_block_hash()); view_change_proof_chain.verify_with_state( &sumeragi.current_topology.ordered_peers, sumeragi.current_topology.max_faults(), - sumeragi.wsv.latest_block_hash(), + wsv_view.latest_block_hash(), ) as u64 } #[allow(clippy::too_many_lines)] -fn handle_message( +fn handle_message<'state>( message: Message, sumeragi: &mut Sumeragi, - voting_block: &mut Option, + wsv: &'state State, + voting_block: &mut Option>, current_view_change_index: u64, view_change_proof_chain: &mut ProofChain, voting_signatures: &mut Vec>, @@ -421,15 +399,17 @@ fn handle_message( let block_hash = block.hash(); info!(%addr, %role, hash=%block_hash, "Block sync update received"); - match handle_block_sync(block, &sumeragi.wsv, &sumeragi.finalized_wsv) { + // Release writer before handling block sync + let _ = voting_block.take(); + match handle_block_sync(block, wsv) { 
Ok(BlockSyncOk::CommitBlock(block, new_wsv)) => { sumeragi.commit_block(block, new_wsv) } Ok(BlockSyncOk::ReplaceTopBlock(block, new_wsv)) => { warn!( %addr, %role, - peer_latest_block_hash=?sumeragi.wsv.latest_block_hash(), - peer_latest_block_view_change_index=?sumeragi.wsv.latest_block_view_change_index(), + peer_latest_block_hash=?new_wsv.latest_block_hash(), + peer_latest_block_view_change_index=?new_wsv.latest_block_view_change_index(), consensus_latest_block_hash=%block.hash(), consensus_latest_block_view_change_index=%block.payload().header.view_change_index, "Soft fork occurred: peer in inconsistent state. Rolling back and replacing top block." @@ -451,7 +431,7 @@ fn handle_message( )) => { debug!( %addr, %role, - peer_latest_block_hash=?sumeragi.wsv.latest_block_hash(), + peer_latest_block_hash=?wsv.view().latest_block_hash(), peer_latest_block_view_change_index=?peer_view_change_index, consensus_latest_block_hash=%block_hash, consensus_latest_block_view_change_index=%block_view_change_index, @@ -510,7 +490,9 @@ fn handle_message( .is_consensus_required() .expect("Peer has `ValidatingPeer` role, which mean that current topology require consensus"); - if let Some(v_block) = vote_for_block(sumeragi, ¤t_topology, block_created) { + // Release block writer before creating new one + let _ = voting_block.take(); + if let Some(v_block) = vote_for_block(sumeragi, wsv, ¤t_topology, block_created) { let block_hash = v_block.block.payload().hash(); let msg = MessagePacket::new( @@ -529,7 +511,9 @@ fn handle_message( "Peer has `ObservingPeer` role, which mean that current topology require consensus", ); - if let Some(v_block) = vote_for_block(sumeragi, ¤t_topology, block_created) { + // Release block writer before creating new one + let _ = voting_block.take(); + if let Some(v_block) = vote_for_block(sumeragi, wsv, ¤t_topology, block_created) { if current_view_change_index >= 1 { let block_hash = v_block.block.payload().hash(); @@ -547,7 +531,11 @@ fn handle_message( 
} } (Message::BlockCreated(block_created), Role::ProxyTail) => { - if let Some(mut new_block) = vote_for_block(sumeragi, current_topology, block_created) { + // Release block writer before creating new one + let _ = voting_block.take(); + if let Some(mut new_block) = + vote_for_block(sumeragi, wsv, current_topology, block_created) + { // NOTE: Up until this point it was unknown which block is expected to be received, // therefore all the signatures (of any hash) were collected and will now be pruned add_signatures::(&mut new_block, voting_signatures.drain(..)); @@ -585,9 +573,10 @@ fn handle_message( } #[allow(clippy::too_many_lines)] -fn process_message_independent( +fn process_message_independent<'state>( sumeragi: &mut Sumeragi, - voting_block: &mut Option, + wsv: &'state State, + voting_block: &mut Option>, current_view_change_index: u64, view_change_proof_chain: &mut ProofChain, round_start_time: &Instant, @@ -609,7 +598,7 @@ fn process_message_independent( info!(%addr, txns=%transactions.len(), "Creating block..."); // TODO: properly process triggers! 
- let mut new_wsv = sumeragi.wsv.clone(); + let mut new_wsv = wsv.block(false); let event_recommendations = Vec::new(); let new_block = match BlockBuilder::new( transactions, @@ -779,20 +768,19 @@ pub(crate) fn run( genesis_network: Option, mut sumeragi: Sumeragi, mut shutdown_receiver: tokio::sync::oneshot::Receiver<()>, + wsv: Arc, ) { // Connect peers with initial topology sumeragi.connect_peers(&sumeragi.current_topology); let span = span!(tracing::Level::TRACE, "genesis").entered(); - let is_genesis_peer = if sumeragi.wsv.height() == 0 - || sumeragi.wsv.latest_block_hash().is_none() - { + let is_genesis_peer = if wsv.view().height() == 0 || wsv.view().latest_block_hash().is_none() { if let Some(genesis_network) = genesis_network { - sumeragi.sumeragi_init_commit_genesis(genesis_network); + sumeragi.sumeragi_init_commit_genesis(genesis_network, &wsv); true } else { sumeragi - .init_listen_for_genesis(&mut shutdown_receiver) + .init_listen_for_genesis(&wsv, &mut shutdown_receiver) .unwrap_or_else(|err| assert_ne!(EarlyReturn::Disconnected, err, "Disconnected")); false } @@ -813,8 +801,8 @@ pub(crate) fn run( let mut should_sleep = false; let mut view_change_proof_chain = ProofChain::default(); let mut old_view_change_index = 0; - let mut old_latest_block_hash = sumeragi - .wsv + let mut old_latest_block_hash = wsv + .view() .latest_block_ref() .expect("WSV must have blocks") .hash(); @@ -838,6 +826,8 @@ pub(crate) fn run( let span_for_sumeragi_cycle = span!(Level::TRACE, "main_thread_cycle"); let _enter_for_sumeragi_cycle = span_for_sumeragi_cycle.enter(); + let wsv_view = wsv.view(); + sumeragi .transaction_cache // Checking if transactions are in the blockchain is costly @@ -851,7 +841,7 @@ pub(crate) fn run( let mut expired_transactions = Vec::new(); sumeragi.queue.get_transactions_for_block( - &sumeragi.wsv, + &wsv_view, sumeragi.max_txs_in_block, &mut sumeragi.transaction_cache, &mut expired_transactions, @@ -860,6 +850,7 @@ pub(crate) fn run( let 
current_view_change_index = prune_view_change_proofs_and_calculate_current_index( &sumeragi, + &wsv_view, &mut view_change_proof_chain, ); @@ -869,10 +860,7 @@ pub(crate) fn run( current_view_change_index, &mut old_view_change_index, &mut old_latest_block_hash, - &sumeragi - .wsv - .latest_block_ref() - .expect("WSV must have blocks"), + &wsv_view.latest_block_ref().expect("WSV must have blocks"), &mut sumeragi.current_topology, &mut voting_block, &mut voting_signatures, @@ -896,6 +884,7 @@ pub(crate) fn run( suggest_view_change( &sumeragi, + &wsv_view, &mut view_change_proof_chain, current_view_change_index, ); @@ -907,6 +896,7 @@ pub(crate) fn run( sumeragi .receive_network_packet( + &wsv_view, &mut view_change_proof_chain, &mut control_message_in_a_row_counter, ) @@ -918,6 +908,7 @@ pub(crate) fn run( handle_message( message, &mut sumeragi, + &wsv, &mut voting_block, current_view_change_index, &mut view_change_proof_chain, @@ -927,8 +918,10 @@ pub(crate) fn run( ); // State could be changed after handling message so it is necessary to reset state before handling message independent step + let wsv_view = wsv.view(); let current_view_change_index = prune_view_change_proofs_and_calculate_current_index( &sumeragi, + &wsv_view, &mut view_change_proof_chain, ); @@ -938,10 +931,7 @@ pub(crate) fn run( current_view_change_index, &mut old_view_change_index, &mut old_latest_block_hash, - &sumeragi - .wsv - .latest_block_ref() - .expect("WSV must have blocks"), + &wsv_view.latest_block_ref().expect("WSV must have blocks"), &mut sumeragi.current_topology, &mut voting_block, &mut voting_signatures, @@ -952,6 +942,7 @@ pub(crate) fn run( process_message_independent( &mut sumeragi, + &wsv, &mut voting_block, current_view_change_index, &mut view_change_proof_chain, @@ -990,17 +981,18 @@ fn expired_event(txn: &AcceptedTransaction) -> Event { .into() } -fn vote_for_block( +fn vote_for_block<'state>( sumeragi: &Sumeragi, + wsv: &'state State, topology: &Topology, BlockCreated { 
block }: BlockCreated, -) -> Option { +) -> Option> { let block_hash = block.payload().hash(); let addr = &sumeragi.peer_id.address; let role = sumeragi.current_topology.role(&sumeragi.peer_id); trace!(%addr, %role, block_hash=%block_hash, "Block received, voting..."); - let mut new_wsv = sumeragi.wsv.clone(); + let mut new_wsv = wsv.block(false); let block = match ValidBlock::validate(block, topology, &mut new_wsv) { Ok(block) => block, Err((_, error)) => { @@ -1048,10 +1040,6 @@ fn early_return( trait ApplyBlockStrategy { const LOG_MESSAGE: &'static str; - /// Perform necessary changes in sumeragi before applying block. - /// Like updating `wsv` or `finalized_wsv`. - fn before_update_hook(sumeragi: &mut Sumeragi); - /// Operation to invoke in kura to store block. fn kura_store_block(kura: &Kura, block: CommittedBlock); } @@ -1062,13 +1050,6 @@ struct NewBlockStrategy; impl ApplyBlockStrategy for NewBlockStrategy { const LOG_MESSAGE: &'static str = "Committing block"; - #[inline] - fn before_update_hook(sumeragi: &mut Sumeragi) { - // Save current wsv state in case of rollback in the future - // Use swap to avoid cloning since `wsv` will be overwritten anyway by `new_wsv` - core::mem::swap(&mut sumeragi.finalized_wsv, &mut sumeragi.wsv); - } - #[inline] fn kura_store_block(kura: &Kura, block: CommittedBlock) { kura.store_block(block) @@ -1081,20 +1062,15 @@ struct ReplaceTopBlockStrategy; impl ApplyBlockStrategy for ReplaceTopBlockStrategy { const LOG_MESSAGE: &'static str = "Replacing top block"; - #[inline] - fn before_update_hook(_sumeragi: &mut Sumeragi) { - // Do nothing since valid new_wsv already provided - } - #[inline] fn kura_store_block(kura: &Kura, block: CommittedBlock) { kura.replace_top_block(block) } } -enum BlockSyncOk { - CommitBlock(CommittedBlock, WorldStateView), - ReplaceTopBlock(CommittedBlock, WorldStateView), +enum BlockSyncOk<'state> { + CommitBlock(CommittedBlock, StateBlock<'state>), + ReplaceTopBlock(CommittedBlock, 
StateBlock<'state>), } #[derive(Debug)] @@ -1113,19 +1089,18 @@ enum BlockSyncError { fn handle_block_sync( block: SignedBlock, - wsv: &WorldStateView, - finalized_wsv: &WorldStateView, + wsv: &State, ) -> Result { let block_height = block.payload().header.height; - let wsv_height = wsv.height(); + let wsv_height = wsv.view().height(); if wsv_height + 1 == block_height { // Normal branch for adding new block on top of current - let mut new_wsv = wsv.clone(); + let mut new_wsv = wsv.block(false); let topology = { let last_committed_block = new_wsv .latest_block_ref() .expect("Not in genesis round so must have at least genesis block"); - let new_peers = new_wsv.peers_ids().clone(); + let new_peers = new_wsv.world.peers_ids().clone(); let view_change_index = block.payload().header().view_change_index; Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) }; @@ -1140,12 +1115,25 @@ fn handle_block_sync( } else if wsv_height == block_height && block_height > 1 { // Soft-fork on genesis block isn't possible // Soft fork branch for replacing current block with valid one - let mut new_wsv = finalized_wsv.clone(); + + let peer_view_change_index = wsv.view().latest_block_view_change_index(); + let block_view_change_index = block.payload().header.view_change_index; + if peer_view_change_index >= block_view_change_index { + return Err(( + block, + BlockSyncError::SoftForkBlockSmallViewChangeIndex { + peer_view_change_index, + block_view_change_index, + }, + )); + } + + let mut new_wsv = wsv.block(true); let topology = { let last_committed_block = new_wsv .latest_block_ref() .expect("Not in genesis round so must have at least genesis block"); - let new_peers = new_wsv.peers_ids().clone(); + let new_peers = new_wsv.world.peers_ids().clone(); let view_change_index = block.payload().header().view_change_index; Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) }; @@ -1156,21 +1144,7 @@ fn handle_block_sync( .map_err(|(block, 
err)| (block.into(), err)) }) .map_err(|(block, error)| (block, BlockSyncError::SoftForkBlockNotValid(error))) - .and_then(|block| { - let peer_view_change_index = wsv.latest_block_view_change_index(); - let block_view_change_index = block.payload().header.view_change_index; - if peer_view_change_index < block_view_change_index { - Ok(BlockSyncOk::ReplaceTopBlock(block, new_wsv)) - } else { - Err(( - block.into(), - BlockSyncError::SoftForkBlockSmallViewChangeIndex { - peer_view_change_index, - block_view_change_index, - }, - )) - } - }) + .map(|block| BlockSyncOk::ReplaceTopBlock(block, new_wsv)) } else { // Error branch other peer send irrelevant block Err(( @@ -1194,7 +1168,7 @@ mod tests { fn create_data_for_test( topology: &Topology, leader_key_pair: KeyPair, - ) -> (WorldStateView, Arc, SignedBlock) { + ) -> (State, Arc, SignedBlock) { // Predefined world state let alice_id: AccountId = "alice@wonderland".parse().expect("Valid"); let alice_keys = KeyPair::generate().expect("Valid"); @@ -1206,58 +1180,70 @@ mod tests { let world = World::with([domain], topology.ordered_peers.clone()); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world, Arc::clone(&kura), query_handle); + let wsv = State::new(world, Arc::clone(&kura), query_handle); // Create "genesis" block // Creating an instruction let fail_box: InstructionExpr = Fail::new("Dummy isi").into(); + let mut wsv_block = wsv.block(false); // Making two transactions that have the same instruction let tx = TransactionBuilder::new(alice_id.clone()) .with_instructions([fail_box]) .sign(alice_keys.clone()) .expect("Valid"); - let tx = AcceptedTransaction::accept(tx, &wsv.transaction_executor().transaction_limits) - .expect("Valid"); + let tx = + AcceptedTransaction::accept(tx, &wsv_block.transaction_executor().transaction_limits) + .expect("Valid"); // Creating a block of two identical transactions and validating it let block = 
BlockBuilder::new(vec![tx.clone(), tx], topology.clone(), Vec::new()) - .chain(0, &mut wsv) + .chain(0, &mut wsv_block) .sign(leader_key_pair.clone()) .expect("Block is valid"); let genesis = block.commit(topology).expect("Block is valid"); - wsv.apply(&genesis).expect("Failed to apply block"); + wsv_block.apply(&genesis).expect("Failed to apply block"); + wsv_block.commit(); kura.store_block(genesis); - // Making two transactions that have the same instruction - let create_asset_definition1 = RegisterExpr::new(AssetDefinition::quantity( - "xor1#wonderland".parse().expect("Valid"), - )); - let create_asset_definition2 = RegisterExpr::new(AssetDefinition::quantity( - "xor2#wonderland".parse().expect("Valid"), - )); - - let tx1 = TransactionBuilder::new(alice_id.clone()) - .with_instructions([create_asset_definition1]) - .sign(alice_keys.clone()) - .expect("Valid"); - let tx1 = AcceptedTransaction::accept(tx1, &wsv.transaction_executor().transaction_limits) + let block = { + let mut wsv_block = wsv.block(false); + // Making two transactions that have the same instruction + let create_asset_definition1 = RegisterExpr::new(AssetDefinition::quantity( + "xor1#wonderland".parse().expect("Valid"), + )); + let create_asset_definition2 = RegisterExpr::new(AssetDefinition::quantity( + "xor2#wonderland".parse().expect("Valid"), + )); + + let tx1 = TransactionBuilder::new(alice_id.clone()) + .with_instructions([create_asset_definition1]) + .sign(alice_keys.clone()) + .expect("Valid"); + let tx1 = AcceptedTransaction::accept( + tx1, + &wsv_block.transaction_executor().transaction_limits, + ) .map(Into::into) .expect("Valid"); - let tx2 = TransactionBuilder::new(alice_id) - .with_instructions([create_asset_definition2]) - .sign(alice_keys) - .expect("Valid"); - let tx2 = AcceptedTransaction::accept(tx2, &wsv.transaction_executor().transaction_limits) + let tx2 = TransactionBuilder::new(alice_id) + .with_instructions([create_asset_definition2]) + .sign(alice_keys) + 
.expect("Valid"); + let tx2 = AcceptedTransaction::accept( + tx2, + &wsv_block.transaction_executor().transaction_limits, + ) .map(Into::into) .expect("Valid"); - // Creating a block of two identical transactions and validating it - let block = BlockBuilder::new(vec![tx1, tx2], topology.clone(), Vec::new()) - .chain(0, &mut wsv.clone()) - .sign(leader_key_pair) - .expect("Block is valid"); + // Creating a block of two identical transactions and validating it + BlockBuilder::new(vec![tx1, tx2], topology.clone(), Vec::new()) + .chain(0, &mut wsv_block) + .sign(leader_key_pair) + .expect("Block is valid") + }; (wsv, kura, block.into()) } @@ -1270,13 +1256,12 @@ mod tests { &"127.0.0.1:8080".parse().unwrap(), leader_key_pair.public_key(), )]); - let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); - let wsv = finalized_wsv.clone(); + let (wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); // Malform block to make it invalid block.payload_mut().commit_topology.clear(); - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!(result, Err((_, BlockSyncError::BlockNotValid(_))))) } @@ -1287,19 +1272,23 @@ mod tests { &"127.0.0.1:8080".parse().unwrap(), leader_key_pair.public_key(), )]); - let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); - let mut wsv = finalized_wsv.clone(); + let (wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); - let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let mut wsv_block = wsv.block(false); + let validated_block = + ValidBlock::validate(block.clone(), &topology, &mut wsv_block).unwrap(); let committed_block = validated_block.commit(&topology).expect("Block is valid"); - wsv.apply_without_execution(&committed_block) + wsv_block + .apply_without_execution(&committed_block) .expect("Failed to apply block"); + 
wsv_block.commit(); kura.store_block(committed_block); // Malform block to make it invalid block.payload_mut().commit_topology.clear(); + block.payload_mut().header.view_change_index = 1; - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!( result, Err((_, BlockSyncError::SoftForkBlockNotValid(_))) @@ -1311,13 +1300,12 @@ mod tests { async fn block_sync_not_proper_height() { let topology = Topology::new(UniqueVec::new()); let leader_key_pair = KeyPair::generate().unwrap(); - let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); - let wsv = finalized_wsv.clone(); + let (wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); // Change block height block.payload_mut().header.height = 42; - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!( result, Err(( @@ -1338,9 +1326,8 @@ mod tests { &"127.0.0.1:8080".parse().unwrap(), leader_key_pair.public_key(), )]); - let (finalized_wsv, _, block) = create_data_for_test(&topology, leader_key_pair); - let wsv = finalized_wsv.clone(); - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let (wsv, _, block) = create_data_for_test(&topology, leader_key_pair); + let result = handle_block_sync(block, &wsv); assert!(matches!(result, Ok(BlockSyncOk::CommitBlock(_, _)))) } @@ -1351,20 +1338,24 @@ mod tests { &"127.0.0.1:8080".parse().unwrap(), leader_key_pair.public_key(), )]); - let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); - let mut wsv = finalized_wsv.clone(); + let (wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); - let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let mut wsv_block = wsv.block(false); + let validated_block = + ValidBlock::validate(block.clone(), &topology, &mut wsv_block).unwrap(); let committed_block 
= validated_block.commit(&topology).expect("Block is valid"); - wsv.apply_without_execution(&committed_block) + wsv_block + .apply_without_execution(&committed_block) .expect("Failed to apply block"); + wsv_block.commit(); + kura.store_block(committed_block); - assert_eq!(wsv.latest_block_view_change_index(), 0); + assert_eq!(wsv.view().latest_block_view_change_index(), 0); // Increase block view change index block.payload_mut().header.view_change_index = 42; - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!(result, Ok(BlockSyncOk::ReplaceTopBlock(_, _)))) } @@ -1375,23 +1366,26 @@ mod tests { &"127.0.0.1:8080".parse().unwrap(), leader_key_pair.public_key(), )]); - let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); - let mut wsv = finalized_wsv.clone(); + let (wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); // Increase block view change index block.payload_mut().header.view_change_index = 42; - let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let mut wsv_block = wsv.block(false); + let validated_block = + ValidBlock::validate(block.clone(), &topology, &mut wsv_block).unwrap(); let committed_block = validated_block.commit(&topology).expect("Block is valid"); - wsv.apply_without_execution(&committed_block) + wsv_block + .apply_without_execution(&committed_block) .expect("Failed to apply block"); + wsv_block.commit(); kura.store_block(committed_block); - assert_eq!(wsv.latest_block_view_change_index(), 42); + assert_eq!(wsv.view().latest_block_view_change_index(), 42); // Decrease block view change index back block.payload_mut().header.view_change_index = 0; - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!( result, Err(( @@ -1409,15 +1403,14 @@ mod tests { async fn block_sync_genesis_block_do_not_replace() { let 
topology = Topology::new(UniqueVec::new()); let leader_key_pair = KeyPair::generate().unwrap(); - let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); - let wsv = finalized_wsv.clone(); + let (wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); // Change block height and view change index // Soft-fork on genesis block is not possible block.payload_mut().header.view_change_index = 42; block.payload_mut().header.height = 1; - let result = handle_block_sync(block, &wsv, &finalized_wsv); + let result = handle_block_sync(block, &wsv); assert!(matches!( result, Err(( diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index b6f3c7391f0..dc4f846c581 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -15,9 +15,13 @@ use iroha_genesis::GenesisNetwork; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Metrics; use network_topology::{Role, Topology}; -use tokio::sync::watch; -use crate::{block::ValidBlock, handler::ThreadHandler, kura::BlockCount}; +use crate::{ + block::ValidBlock, + handler::ThreadHandler, + kura::BlockCount, + wsv::{State, StateBlock}, +}; pub mod main_loop; pub mod message; @@ -45,8 +49,7 @@ struct LastUpdateMetricsData { /// Handle to `Sumeragi` actor #[derive(Clone)] pub struct SumeragiHandle { - public_wsv_receiver: watch::Receiver, - public_finalized_wsv_receiver: watch::Receiver, + wsv: Arc, metrics: Metrics, last_update_metrics_mutex: Arc>, network: IrohaNetwork, @@ -59,46 +62,6 @@ pub struct SumeragiHandle { } impl SumeragiHandle { - /// Pass closure inside and apply fn to [`WorldStateView`]. - /// This function must be used with very cheap closures. - /// So that it costs no more than cloning wsv. - pub fn apply_wsv(&self, f: impl FnOnce(&WorldStateView) -> T) -> T { - f(&self.public_wsv_receiver.borrow()) - } - - /// Get public clone of [`WorldStateView`]. 
- pub fn wsv_clone(&self) -> WorldStateView { - self.public_wsv_receiver.borrow().clone() - } - - /// Notify when [`WorldStateView`] is updated. - pub async fn wsv_updated(&mut self) { - self.public_wsv_receiver - .changed() - .await - .expect("Shouldn't return error as long as there is at least one SumeragiHandle"); - } - - /// Pass closure inside and apply fn to finalized [`WorldStateView`]. - /// This function must be used with very cheap closures. - /// So that it costs no more than cloning wsv. - pub fn apply_finalized_wsv(&self, f: impl FnOnce(&WorldStateView) -> T) -> T { - f(&self.public_finalized_wsv_receiver.borrow()) - } - - /// Get public clone of finalized [`WorldStateView`]. - pub fn finalized_wsv_clone(&self) -> WorldStateView { - self.public_finalized_wsv_receiver.borrow().clone() - } - - /// Notify when finalized [`WorldStateView`] is updated. - pub async fn finalized_wsv_updated(&mut self) { - self.public_finalized_wsv_receiver - .changed() - .await - .expect("Shouldn't return error as long as there is at least one SumeragiHandle"); - } - /// Update the metrics on the world state view. 
/// /// # Errors @@ -114,7 +77,7 @@ impl SumeragiHandle { .try_into() .expect("casting usize to u64"); - let wsv = self.wsv_clone(); + let wsv = self.wsv.view(); let mut last_guard = self.last_update_metrics_mutex.lock(); @@ -176,9 +139,8 @@ impl SumeragiHandle { self.metrics.connected_peers.set(online_peers_count); - let domains = wsv.domains(); - self.metrics.domains.set(domains.len() as u64); - for domain in domains.values() { + self.metrics.domains.set(wsv.world.domains.len() as u64); + for domain in wsv.world.domains() { self.metrics .accounts .get_metric_with_label_values(&[domain.id().name.as_ref()]) @@ -232,7 +194,7 @@ impl SumeragiHandle { SumeragiStartArgs { configuration, events_sender, - mut wsv, + wsv, queue, kura, network, @@ -243,66 +205,54 @@ impl SumeragiHandle { let (control_message_sender, control_message_receiver) = mpsc::sync_channel(100); let (message_sender, message_receiver) = mpsc::sync_channel(100); - let skip_block_count = wsv.block_hashes.len(); - let mut blocks_iter = (skip_block_count + 1..=block_count).map(|block_height| { - kura.get_block_by_height(block_height as u64).expect( - "Sumeragi should be able to load the block that was reported as presented. \ - If not, the block storage was probably disconnected.", - ) - }); + let blocks_iter; + let current_topology; - let current_topology = match wsv.height() { - 0 => { - assert!(!configuration.trusted_peers.peers.is_empty()); - Topology::new(configuration.trusted_peers.peers.clone()) - } - height => { - let block_ref = kura.get_block_by_height(height).expect( - "Sumeragi could not load block that was reported as present. 
\ - Please check that the block storage was not disconnected.", - ); - Topology::recreate_topology( - &block_ref, - 0, - wsv.peers_ids().iter().cloned().collect(), + { + let view = wsv.view(); + let skip_block_count = view.block_hashes.len(); + blocks_iter = (skip_block_count + 1..=block_count).map(|block_height| { + kura.get_block_by_height(block_height as u64).expect( + "Sumeragi should be able to load the block that was reported as presented. \ + If not, the block storage was probably disconnected.", ) - } - }; + }); - let block_iter_except_last = - (&mut blocks_iter).take(block_count.saturating_sub(skip_block_count + 1)); - for block in block_iter_except_last { - let block = ValidBlock::validate(Clone::clone(&block), ¤t_topology, &mut wsv) - .expect("Kura blocks should be valid") - .commit(¤t_topology) - .expect("Kura blocks should be valid"); - wsv.apply_without_execution(&block).expect( - "Block application in init should not fail. \ - Blocks loaded from kura assumed to be valid", - ); + current_topology = match view.height() { + 0 => { + assert!(!configuration.trusted_peers.peers.is_empty()); + Topology::new(configuration.trusted_peers.peers.clone()) + } + height => { + let block_ref = kura.get_block_by_height(height).expect( + "Sumeragi could not load block that was reported as present. 
\ + Please check that the block storage was not disconnected.", + ); + Topology::recreate_topology( + &block_ref, + 0, + view.world.peers_ids().iter().cloned().collect(), + ) + } + }; } - // finalized_wsv is one block behind - let finalized_wsv = wsv.clone(); - - if let Some(latest_block) = blocks_iter.next() { - let latest_block = - ValidBlock::validate(Clone::clone(&latest_block), ¤t_topology, &mut wsv) + for block in blocks_iter { + let mut wsv_block = wsv.block(false); + let block = + ValidBlock::validate(Clone::clone(&block), ¤t_topology, &mut wsv_block) .expect("Kura blocks should be valid") .commit(¤t_topology) .expect("Kura blocks should be valid"); - wsv.apply_without_execution(&latest_block).expect( + wsv_block.apply_without_execution(&block).expect( "Block application in init should not fail. \ Blocks loaded from kura assumed to be valid", ); + wsv_block.commit(); } info!("Sumeragi has finished loading blocks and setting up the WSV"); - let (public_wsv_sender, public_wsv_receiver) = watch::channel(wsv.clone()); - let (public_finalized_wsv_sender, public_finalized_wsv_receiver) = - watch::channel(finalized_wsv.clone()); - #[cfg(debug_assertions)] let debug_force_soft_fork = configuration.debug_force_soft_fork; #[cfg(not(debug_assertions))] @@ -313,8 +263,6 @@ impl SumeragiHandle { queue: Arc::clone(&queue), peer_id: configuration.peer_id.clone(), events_sender, - public_wsv_sender, - public_finalized_wsv_sender, commit_time: Duration::from_millis(configuration.commit_time_limit_ms), block_time: Duration::from_millis(configuration.block_time_ms), max_txs_in_block: configuration.max_transactions_in_block as usize, @@ -324,18 +272,17 @@ impl SumeragiHandle { message_receiver, debug_force_soft_fork, current_topology, - wsv, - finalized_wsv, transaction_cache: Vec::new(), }; // Oneshot channel to allow forcefully stopping the thread. 
let (shutdown_sender, shutdown_receiver) = tokio::sync::oneshot::channel(); + let wsv_clone = Arc::clone(&wsv); let thread_handle = std::thread::Builder::new() .name("sumeragi thread".to_owned()) .spawn(move || { - main_loop::run(genesis_network, sumeragi, shutdown_receiver); + main_loop::run(genesis_network, sumeragi, shutdown_receiver, wsv_clone); }) .expect("Sumeragi thread spawn should not fail."); @@ -347,13 +294,12 @@ impl SumeragiHandle { let thread_handle = ThreadHandler::new(Box::new(shutdown), thread_handle); SumeragiHandle { + wsv, network, queue, kura, control_message_sender, message_sender, - public_wsv_receiver, - public_finalized_wsv_receiver, metrics: Metrics::default(), last_update_metrics_mutex: Arc::new(Mutex::new(LastUpdateMetricsData { block_height: 0, @@ -374,18 +320,18 @@ pub const TELEMETRY_INTERVAL: Duration = Duration::from_secs(5); /// Structure represents a block that is currently in discussion. #[non_exhaustive] -pub struct VotingBlock { +pub struct VotingBlock<'state> { /// At what time has this peer voted for this block pub voted_at: Instant, /// Valid Block pub block: ValidBlock, - /// WSV after applying transactions to it - pub new_wsv: WorldStateView, + /// [`WorldState`] after applying transactions to it but before it was committed + pub new_wsv: StateBlock<'state>, } -impl VotingBlock { +impl VotingBlock<'_> { /// Construct new `VotingBlock` with current time. - pub fn new(block: ValidBlock, new_wsv: WorldStateView) -> VotingBlock { + pub fn new(block: ValidBlock, new_wsv: StateBlock<'_>) -> VotingBlock { VotingBlock { block, voted_at: Instant::now(), @@ -395,7 +341,7 @@ impl VotingBlock { /// Construct new `VotingBlock` with the given time. 
pub(crate) fn voted_at( block: ValidBlock, - new_wsv: WorldStateView, + new_wsv: StateBlock<'_>, voted_at: Instant, ) -> VotingBlock { VotingBlock { @@ -411,7 +357,7 @@ impl VotingBlock { pub struct SumeragiStartArgs<'args> { pub configuration: &'args Configuration, pub events_sender: EventsSender, - pub wsv: WorldStateView, + pub wsv: Arc, pub queue: Arc, pub kura: Arc, pub network: IrohaNetwork, diff --git a/core/src/tx.rs b/core/src/tx.rs index 790d8942326..2d217a33df3 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -18,7 +18,10 @@ use iroha_genesis::GenesisTransaction; use iroha_logger::{debug, error}; use iroha_macro::FromVariant; -use crate::{prelude::*, smartcontracts::wasm}; +use crate::{ + smartcontracts::wasm, + wsv::{StateBlock, StateTransaction}, +}; /// `AcceptedTransaction` — a transaction accepted by iroha peer. #[derive(Debug, Clone, PartialEq, Eq)] @@ -320,7 +323,7 @@ impl TransactionExecutor { } /// Move transaction lifecycle forward by checking if the - /// instructions can be applied to the `WorldStateView`. + /// instructions can be applied to the [`StateBlock`]. /// /// Validation is skipped for genesis. 
/// @@ -329,11 +332,13 @@ impl TransactionExecutor { pub fn validate( &self, tx: AcceptedTransaction, - wsv: &mut WorldStateView, + wsv: &mut StateBlock<'_>, ) -> Result { - if let Err(rejection_reason) = self.validate_internal(tx.clone(), wsv) { + let mut transaction = wsv.transaction(); + if let Err(rejection_reason) = self.validate_internal(tx.clone(), &mut transaction) { return Err((tx.0, rejection_reason)); } + transaction.apply(); Ok(tx.0) } @@ -341,11 +346,12 @@ impl TransactionExecutor { fn validate_internal( &self, tx: AcceptedTransaction, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), TransactionRejectionReason> { let authority = &tx.payload().authority; if !wsv + .world .domain(&authority.domain_id) .map_err(|_e| { TransactionRejectionReason::AccountDoesNotExist(FindError::Domain( @@ -360,19 +366,13 @@ impl TransactionExecutor { )); } - // Create clone wsv to try execute transaction against it to prevent failed transaction from changing wsv - let mut wsv_for_validation = wsv.clone(); - debug!("Validating transaction: {:?}", tx); - Self::validate_with_runtime_executor(tx.clone(), &mut wsv_for_validation)?; + Self::validate_with_runtime_executor(tx.clone(), wsv)?; if let (authority, Executable::Wasm(bytes)) = tx.into() { - self.validate_wasm(authority, &mut wsv_for_validation, bytes)? + self.validate_wasm(authority, wsv, bytes)? } - // Replace wsv in case of successful execution - *wsv = wsv_for_validation; - debug!("Validation successful"); Ok(()) } @@ -380,7 +380,7 @@ impl TransactionExecutor { fn validate_wasm( &self, authority: AccountId, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, wasm: WasmSmartContract, ) -> Result<(), TransactionRejectionReason> { debug!("Validating wasm"); @@ -406,12 +406,13 @@ impl TransactionExecutor { /// Note: transaction instructions will be executed on the given `wsv`. 
fn validate_with_runtime_executor( tx: AcceptedTransaction, - wsv: &mut WorldStateView, + wsv: &mut StateTransaction<'_, '_>, ) -> Result<(), TransactionRejectionReason> { let tx: SignedTransaction = tx.into(); let authority = tx.payload().authority.clone(); - wsv.executor() + wsv.world + .executor .clone() // Cloning executor is a cheap operation .validate_transaction(wsv, &authority, tx) .map_err(|error| { diff --git a/core/src/wsv.rs b/core/src/wsv.rs index eaac7559502..0247a43451c 100644 --- a/core/src/wsv.rs +++ b/core/src/wsv.rs @@ -1,13 +1,5 @@ -//! This module provides the [`WorldStateView`] — an in-memory representation of the current blockchain -//! state. -use std::{ - borrow::Borrow, - collections::{BTreeSet, HashMap}, - fmt::Debug, - marker::PhantomData, - sync::Arc, - time::Duration, -}; +//! This module provides the [`State`] — an in-memory representation of the current blockchain state. +use std::{borrow::Borrow, collections::BTreeSet, marker::PhantomData, sync::Arc, time::Duration}; use eyre::Result; use iroha_config::{ @@ -21,9 +13,10 @@ use iroha_data_model::{ events::notification::{TriggerCompletedEvent, TriggerCompletedOutcome}, isi::error::{InstructionExecutionError as Error, MathError}, parameter::Parameter, - permission::PermissionTokenSchema, + permission::{PermissionTokenSchema, Permissions}, prelude::*, query::error::{FindError, QueryExecutionFail}, + role::RoleId, }; use iroha_logger::prelude::*; use iroha_primitives::small::SmallVec; @@ -33,12 +26,20 @@ use serde::{ de::{DeserializeSeed, MapAccess, Visitor}, Deserializer, Serialize, }; +use storage::{ + cell::{Block as CellBlock, Cell, Transaction as CellTransaction, View as CellView}, + storage::{ + Block as StorageBlock, RangeIter, Snapshot as StorageSnapshot, Storage, + Transaction as StorageTransaction, View as StorageView, + }, +}; use crate::{ block::CommittedBlock, executor::Executor, kura::Kura, query::store::LiveQueryStoreHandle, + role::RoleIdWithOwner, smartcontracts::{ 
triggers::{ self, @@ -47,201 +48,251 @@ use crate::{ wasm, Execute, }, tx::TransactionExecutor, - DomainsMap, Parameters, PeersIds, + Parameters, PeersIds, }; /// The global entity consisting of `domains`, `triggers` and etc. /// For example registration of domain, will have this as an ISI target. -#[derive(Debug, Default, Clone, Serialize)] +#[derive(Default, Serialize)] pub struct World { /// Iroha config parameters. - pub(crate) parameters: Parameters, + pub(crate) parameters: Cell, /// Identifications of discovered trusted peers. - pub(crate) trusted_peers_ids: PeersIds, + pub(crate) trusted_peers_ids: Cell, /// Registered domains. - pub(crate) domains: DomainsMap, + pub(crate) domains: Storage, /// Roles. [`Role`] pairs. - pub(crate) roles: crate::RolesMap, + pub(crate) roles: Storage, /// Permission tokens of an account. - pub(crate) account_permission_tokens: crate::PermissionTokensMap, + pub(crate) account_permission_tokens: Storage, /// Roles of an account. - pub(crate) account_roles: crate::AccountRolesSet, + pub(crate) account_roles: Storage, /// Registered permission token ids. - pub(crate) permission_token_schema: PermissionTokenSchema, + pub(crate) permission_token_schema: Cell, /// Triggers - pub(crate) triggers: TriggerSet, + // TODO: refactor `TriggerSet` to use storage inside + pub(crate) triggers: Cell, /// Runtime Executor - pub(crate) executor: Executor, + pub(crate) executor: Cell, } -// Loader for [`Set`] -#[derive(Clone, Copy)] -pub(crate) struct WasmSeed<'e, T> { - pub engine: &'e wasmtime::Engine, - _marker: PhantomData, +/// Struct for block's aggregated changes +pub struct WorldBlock<'world> { + /// Iroha config parameters. + pub(crate) parameters: CellBlock<'world, Parameters>, + /// Identifications of discovered trusted peers. + pub(crate) trusted_peers_ids: CellBlock<'world, PeersIds>, + /// Registered domains. + pub(crate) domains: StorageBlock<'world, DomainId, Domain>, + /// Roles. [`Role`] pairs. 
+ pub(crate) roles: StorageBlock<'world, RoleId, Role>, + /// Permission tokens of an account. + pub(crate) account_permission_tokens: StorageBlock<'world, AccountId, Permissions>, + /// Roles of an account. + pub(crate) account_roles: StorageBlock<'world, RoleIdWithOwner, ()>, + /// Registered permission token ids. + pub(crate) permission_token_schema: CellBlock<'world, PermissionTokenSchema>, + /// Triggers + pub(crate) triggers: CellBlock<'world, TriggerSet>, + /// Runtime Executor + pub(crate) executor: CellBlock<'world, Executor>, + /// Events produced during of block + pub(crate) events_buffer: Vec, } -impl<'e, T> WasmSeed<'e, T> { - pub fn cast(&self) -> WasmSeed<'e, U> { - WasmSeed { - engine: self.engine, - _marker: PhantomData, - } - } +/// Struct for single transaction's aggregated changes +pub struct WorldTransaction<'block, 'world> { + /// Iroha config parameters. + pub(crate) parameters: CellTransaction<'block, 'world, Parameters>, + /// Identifications of discovered trusted peers. + pub(crate) trusted_peers_ids: CellTransaction<'block, 'world, PeersIds>, + /// Registered domains. + pub(crate) domains: StorageTransaction<'block, 'world, DomainId, Domain>, + /// Roles. [`Role`] pairs. + pub(crate) roles: StorageTransaction<'block, 'world, RoleId, Role>, + /// Permission tokens of an account. + pub(crate) account_permission_tokens: + StorageTransaction<'block, 'world, AccountId, Permissions>, + /// Roles of an account. + pub(crate) account_roles: StorageTransaction<'block, 'world, RoleIdWithOwner, ()>, + /// Registered permission token ids. 
+ pub(crate) permission_token_schema: CellTransaction<'block, 'world, PermissionTokenSchema>, + /// Triggers + pub(crate) triggers: CellTransaction<'block, 'world, TriggerSet>, + /// Runtime Executor + pub(crate) executor: CellTransaction<'block, 'world, Executor>, + /// Events produced during execution of block + events_buffer: TransactionEventBuffer<'block>, } -impl<'e, 'de, T> DeserializeSeed<'de> for WasmSeed<'e, Option> -where - WasmSeed<'e, T>: DeserializeSeed<'de, Value = T>, -{ - type Value = Option; +/// Wrapper for event's buffer to apply transaction rollback +struct TransactionEventBuffer<'block> { + /// Events produced during execution of block + events_buffer: &'block mut Vec, + /// Number of events produced during execution current transaction + events_created_in_transaction: usize, +} - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct OptionVisitor<'l, T> { - loader: WasmSeed<'l, T>, - _marker: PhantomData, - } +/// Consistent point in time view of the [`World`] +pub struct WorldView<'world> { + /// Iroha config parameters. + pub(crate) parameters: CellView<'world, Parameters>, + /// Identifications of discovered trusted peers. + pub(crate) trusted_peers_ids: CellView<'world, PeersIds>, + /// Registered domains. + pub(crate) domains: StorageView<'world, DomainId, Domain>, + /// Roles. [`Role`] pairs. + pub(crate) roles: StorageView<'world, RoleId, Role>, + /// Permission tokens of an account. + pub(crate) account_permission_tokens: StorageView<'world, AccountId, Permissions>, + /// Roles of an account. + pub(crate) account_roles: StorageView<'world, RoleIdWithOwner, ()>, + /// Registered permission token ids. 
+ pub(crate) permission_token_schema: CellView<'world, PermissionTokenSchema>, + /// Triggers + pub(crate) triggers: CellView<'world, TriggerSet>, + /// Runtime Executor + pub(crate) executor: CellView<'world, Executor>, +} - impl<'e, 'de, T> Visitor<'de> for OptionVisitor<'e, T> - where - WasmSeed<'e, T>: DeserializeSeed<'de, Value = T>, - { - type Value = Option; +/// Consistent point in time view of the [`World`] +/// Used in places where [`WorldBlock`], [`WorldTransaction`], [`WorldView`] need to be converted to the same type for immutable operations +pub struct WorldSnapshot<'world> { + /// Iroha config parameters. + pub(crate) parameters: &'world Parameters, + /// Identifications of discovered trusted peers. + pub(crate) trusted_peers_ids: &'world PeersIds, + /// Registered domains. + pub(crate) domains: StorageSnapshot<'world, DomainId, Domain>, + /// Roles. [`Role`] pairs. + pub(crate) roles: StorageSnapshot<'world, RoleId, Role>, + /// Permission tokens of an account. + pub(crate) account_permission_tokens: StorageSnapshot<'world, AccountId, Permissions>, + /// Roles of an account. + pub(crate) account_roles: StorageSnapshot<'world, RoleIdWithOwner, ()>, + /// Registered permission token ids. + pub(crate) permission_token_schema: &'world PermissionTokenSchema, + /// Triggers + pub(crate) triggers: &'world TriggerSet, + /// Runtime Executor + pub(crate) executor: &'world Executor, +} - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("struct World") - } +/// Current state of the blockchain aligned with `Iroha` module. +#[derive(Serialize)] +pub struct State { + /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. + pub world: World, + /// Configuration of World State View. + pub config: Cell, + /// Blockchain. 
+ // TODO: Cell is redundant here since block_hashes is very easy to rollback by just popping the last element + pub block_hashes: Cell>>, + /// Hashes of transactions mapped onto block height where they stored + pub transactions: Storage, u64>, + /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. + #[serde(skip)] + pub engine: wasmtime::Engine, - fn visit_none(self) -> Result - where - E: serde::de::Error, - { - Ok(None) - } + /// Reference to Kura subsystem. + #[serde(skip)] + kura: Arc, + /// Handle to the [`LiveQueryStore`]. + #[serde(skip)] + pub query_handle: LiveQueryStoreHandle, + /// Temporary metrics buffer of amounts of any asset that has been transacted. + /// TODO: this should be done through events + #[serde(skip)] + pub new_tx_amounts: Arc>>, +} - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Some(self.loader.deserialize(deserializer)).transpose() - } - } +/// Current state of the blockchain aligned with `Iroha` module. +pub struct StateBlock<'state> { + /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. + pub world: WorldBlock<'state>, + /// Configuration of World State View. + pub config: CellBlock<'state, Configuration>, + /// Blockchain. + pub block_hashes: CellBlock<'state, Vec>>, + /// Hashes of transactions mapped onto block height where they stored + pub transactions: StorageBlock<'state, HashOf, u64>, + /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. + pub engine: &'state wasmtime::Engine, - let visitor = OptionVisitor { - loader: self.cast::(), - _marker: PhantomData, - }; - deserializer.deserialize_option(visitor) - } + /// Reference to Kura subsystem. + kura: &'state Kura, + /// Handle to the [`LiveQueryStore`]. + pub query_handle: &'state LiveQueryStoreHandle, + /// Temporary metrics buffer of amounts of any asset that has been transacted. 
+ /// TODO: this should be done through events + pub new_tx_amounts: &'state Mutex>, } -impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { - type Value = World; - - fn deserialize(self, deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - struct WorldVisitor<'l> { - loader: &'l WasmSeed<'l, World>, - } +/// Current state of the blockchain aligned with `Iroha` module. +pub struct StateTransaction<'block, 'state> { + /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. + pub world: WorldTransaction<'block, 'state>, + /// Configuration of World State View. + pub config: CellTransaction<'block, 'state, Configuration>, + /// Blockchain. + pub block_hashes: CellTransaction<'block, 'state, Vec>>, + /// Hashes of transactions mapped onto block height where they stored + pub transactions: StorageTransaction<'block, 'state, HashOf, u64>, + /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. + pub engine: &'state wasmtime::Engine, - impl<'de> Visitor<'de> for WorldVisitor<'_> { - type Value = World; + /// Reference to Kura subsystem. + kura: &'state Kura, + /// Handle to the [`LiveQueryStore`]. + pub query_handle: &'state LiveQueryStoreHandle, + /// Temporary metrics buffer of amounts of any asset that has been transacted. + /// TODO: this should be done through events + pub new_tx_amounts: &'state Mutex>, +} - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("struct World") - } +/// Current state of the blockchain aligned with `Iroha` module. +pub struct StateView<'state> { + /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. + pub world: WorldView<'state>, + /// Configuration of World State View. + pub config: CellView<'state, Configuration>, + /// Blockchain. 
+ pub block_hashes: CellView<'state, Vec>>, + /// Hashes of transactions mapped onto block height where they stored + pub transactions: StorageView<'state, HashOf, u64>, + /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. + pub engine: &'state wasmtime::Engine, - fn visit_map(self, mut map: M) -> Result - where - M: MapAccess<'de>, - { - let mut parameters = None; - let mut trusted_peers_ids = None; - let mut domains = None; - let mut roles = None; - let mut account_permission_tokens = None; - let mut account_roles = None; - let mut permission_token_schema = None; - let mut triggers = None; - let mut executor = None; - - while let Some(key) = map.next_key::()? { - match key.as_str() { - "parameters" => { - parameters = Some(map.next_value()?); - } - "trusted_peers_ids" => { - trusted_peers_ids = Some(map.next_value()?); - } - "domains" => { - domains = Some(map.next_value()?); - } - "roles" => { - roles = Some(map.next_value()?); - } - "account_permission_tokens" => { - account_permission_tokens = Some(map.next_value()?); - } - "account_roles" => { - account_roles = Some(map.next_value()?); - } - "permission_token_schema" => { - permission_token_schema = Some(map.next_value()?); - } - "triggers" => { - triggers = Some(map.next_value_seed(self.loader.cast::())?); - } - "executor" => { - executor = Some(map.next_value_seed(self.loader.cast::())?); - } - _ => { /* Skip unknown fields */ } - } - } + /// Reference to Kura subsystem. + kura: &'state Kura, + /// Handle to the [`LiveQueryStore`]. + pub query_handle: &'state LiveQueryStoreHandle, + /// Temporary metrics buffer of amounts of any asset that has been transacted. 
+ /// TODO: this should be done through events + pub new_tx_amounts: &'state Mutex>, +} - Ok(World { - parameters: parameters - .ok_or_else(|| serde::de::Error::missing_field("parameters"))?, - trusted_peers_ids: trusted_peers_ids - .ok_or_else(|| serde::de::Error::missing_field("trusted_peers_ids"))?, - domains: domains.ok_or_else(|| serde::de::Error::missing_field("domains"))?, - roles: roles.ok_or_else(|| serde::de::Error::missing_field("roles"))?, - account_permission_tokens: account_permission_tokens.ok_or_else(|| { - serde::de::Error::missing_field("account_permission_tokens") - })?, - account_roles: account_roles - .ok_or_else(|| serde::de::Error::missing_field("account_roles"))?, - permission_token_schema: permission_token_schema.ok_or_else(|| { - serde::de::Error::missing_field("permission_token_schema") - })?, - triggers: triggers - .ok_or_else(|| serde::de::Error::missing_field("triggers"))?, - executor: executor - .ok_or_else(|| serde::de::Error::missing_field("executor"))?, - }) - } - } +/// Current state of the blockchain aligned with `Iroha` module. +/// Used in places where [`StateBlock`], [`StateTransaction`], [`StateView`] need to be converted to the same type for immutable operations +pub struct StateSnapshot<'state> { + /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. + pub world: WorldSnapshot<'state>, + /// Configuration of World State View. + pub config: &'state Configuration, + /// Blockchain. + pub block_hashes: &'state Vec>, + /// Hashes of transactions mapped onto block height where they stored + pub transactions: StorageSnapshot<'state, HashOf, u64>, + /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. 
+ pub engine: &'state wasmtime::Engine, - deserializer.deserialize_struct( - "World", - &[ - "parameters", - "trusted_peers_ids", - "domains", - "roles", - "account_permission_tokens", - "account_roles", - "permission_token_schema", - "triggers", - "executor", - ], - WorldVisitor { loader: &self }, - ) - } + /// Reference to Kura subsystem. + kura: &'state Kura, + /// Handle to the [`LiveQueryStore`]. + pub query_handle: &'state LiveQueryStoreHandle, + /// Temporary metrics buffer of amounts of any asset that has been transacted. + /// TODO: this should be done through events + pub new_tx_amounts: &'state Mutex>, } impl World { @@ -260,153 +311,119 @@ impl World { .map(|domain| (domain.id().clone(), domain)) .collect(); World { - trusted_peers_ids, + trusted_peers_ids: Cell::new(trusted_peers_ids), domains, ..World::new() } } -} - -/// Current state of the blockchain aligned with `Iroha` module. -#[derive(Serialize)] -pub struct WorldStateView { - /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. - pub world: World, - /// Configuration of World State View. - pub config: Configuration, - /// Blockchain. - pub block_hashes: Vec>, - /// Hashes of transactions mapped onto block height where they stored - pub transactions: HashMap, u64>, - /// Buffer containing events generated during `WorldStateView::apply`. Renewed on every block commit. - #[serde(skip)] - pub events_buffer: Vec, - /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. - #[serde(skip)] - pub engine: wasmtime::Engine, - - /// Reference to Kura subsystem. - #[serde(skip)] - kura: Arc, - /// Handle to the [`LiveQueryStore`]. - #[serde(skip)] - query_handle: LiveQueryStoreHandle, - /// Temporary metrics buffer of amounts of any asset that has been transacted. 
- #[serde(skip)] - pub new_tx_amounts: Arc>>, -} - -/// Context necessary for deserializing [`WorldStateView`] -pub struct KuraSeed { - /// Kura subsystem reference - pub kura: Arc, - /// Handle to the [`LiveQueryStore`](crate::query::store::LiveQueryStore). - pub query_handle: LiveQueryStoreHandle, -} -impl<'de> DeserializeSeed<'de> for KuraSeed { - type Value = WorldStateView; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct WorldStateViewVisitor { - loader: KuraSeed, + /// Create struct to apply block's changes + pub fn block(&self, rollback_latest_block: bool) -> WorldBlock { + WorldBlock { + parameters: self.parameters.block(rollback_latest_block), + trusted_peers_ids: self.trusted_peers_ids.block(rollback_latest_block), + domains: self.domains.block(rollback_latest_block), + roles: self.roles.block(rollback_latest_block), + account_permission_tokens: self.account_permission_tokens.block(rollback_latest_block), + account_roles: self.account_roles.block(rollback_latest_block), + permission_token_schema: self.permission_token_schema.block(rollback_latest_block), + triggers: self.triggers.block(rollback_latest_block), + executor: self.executor.block(rollback_latest_block), + events_buffer: Vec::new(), } + } - impl<'de> Visitor<'de> for WorldStateViewVisitor { - type Value = WorldStateView; + /// Create point in time view of the [`World`] + pub fn view(&self) -> WorldView { + WorldView { + parameters: self.parameters.view(), + trusted_peers_ids: self.trusted_peers_ids.view(), + domains: self.domains.view(), + roles: self.roles.view(), + account_permission_tokens: self.account_permission_tokens.view(), + account_roles: self.account_roles.view(), + permission_token_schema: self.permission_token_schema.view(), + triggers: self.triggers.view(), + executor: self.executor.view(), + } + } +} - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("struct WorldStateView") 
+macro_rules! world_read_only_methods { + ($($tt:tt)*) => { + macro_rules! insert_world_read_only_methods { + () => { + $($tt)* } + } + } +} - fn visit_map(self, mut map: M) -> Result - where - M: MapAccess<'de>, - { - let mut world = None; - let mut config = None; - let mut block_hashes = None; - let mut transactions = None; - - let engine = wasm::create_engine(); - - let wasm_seed: WasmSeed<()> = WasmSeed { - engine: &engine, - _marker: PhantomData, - }; +world_read_only_methods! { + // Domain-related methods - while let Some(key) = map.next_key::()? { - match key.as_str() { - "world" => { - world = Some(map.next_value_seed(wasm_seed.cast::())?); - } - "config" => { - config = Some(map.next_value()?); - } - "block_hashes" => { - block_hashes = Some(map.next_value()?); - } - "transactions" => { - transactions = Some(map.next_value()?); - } - _ => { /* Skip unknown fields */ } - } - } + /// Get `Domain` without an ability to modify it. + /// + /// # Errors + /// Fails if there is no domain + pub fn domain(&self, id: &DomainId) -> Result<&Domain, FindError> { + let domain = self + .domains + .get(id) + .ok_or_else(|| FindError::Domain(id.clone()))?; + Ok(domain) + } - Ok(WorldStateView { - world: world.ok_or_else(|| serde::de::Error::missing_field("world"))?, - config: config.ok_or_else(|| serde::de::Error::missing_field("config"))?, - block_hashes: block_hashes - .ok_or_else(|| serde::de::Error::missing_field("block_hashes"))?, - transactions: transactions - .ok_or_else(|| serde::de::Error::missing_field("transactions"))?, - kura: self.loader.kura, - query_handle: self.loader.query_handle, - engine, - events_buffer: Vec::new(), - new_tx_amounts: Arc::new(Mutex::new(Vec::new())), - }) - } - } + /// Get `Domain` and pass it to closure. 
+ /// + /// # Errors + /// Fails if there is no domain + pub fn map_domain<'slf, T>( + &'slf self, + id: &DomainId, + f: impl FnOnce(&'slf Domain) -> T, + ) -> Result { + let domain = self.domain(id)?; + let value = f(domain); + Ok(value) + } - deserializer.deserialize_struct( - "WorldStateView", - &["world", "config", "block_hashes", "transactions"], - WorldStateViewVisitor { loader: self }, - ) + /// Returns reference for domains map + #[inline] + pub fn domains(&self) -> impl Iterator { + self.domains.iter().map(|(_, domain)| domain) } -} -impl Clone for WorldStateView { - fn clone(&self) -> Self { - Self { - world: Clone::clone(&self.world), - config: self.config, - block_hashes: self.block_hashes.clone(), - transactions: self.transactions.clone(), - events_buffer: Vec::new(), - new_tx_amounts: Arc::clone(&self.new_tx_amounts), - engine: self.engine.clone(), - kura: Arc::clone(&self.kura), - query_handle: self.query_handle.clone(), - } + // Account-related methods + + /// Get `Account` and return reference to it. + /// + /// # Errors + /// Fails if there is no domain or account + pub fn account(&self, id: &AccountId) -> Result<&Account, FindError> { + self.domain(&id.domain_id).and_then(|domain| { + domain + .accounts + .get(id) + .ok_or_else(|| FindError::Account(id.clone())) + }) } -} -/// WARNING!!! INTERNAL USE ONLY!!! -impl WorldStateView { - /// Construct [`WorldStateView`] with given [`World`]. - #[must_use] - #[inline] - pub fn new(world: World, kura: Arc, query_handle: LiveQueryStoreHandle) -> Self { - // Added to remain backward compatible with other code primary in tests - let config = ConfigurationProxy::default() - .build() - .expect("Wsv proxy always builds"); - Self::from_configuration(config, world, kura, query_handle) + /// Get `Account` and pass it to closure. 
+ /// + /// # Errors + /// Fails if there is no domain or account + pub fn map_account<'slf, T>( + &'slf self, + id: &AccountId, + f: impl FnOnce(&'slf Account) -> T, + ) -> Result { + let domain = self.domain(&id.domain_id)?; + let account = domain + .accounts + .get(id) + .ok_or(FindError::Account(id.clone()))?; + Ok(f(account)) } /// Get `Account`'s `Asset`s @@ -421,11 +438,11 @@ impl WorldStateView { } /// Get [`Account`]'s [`RoleId`]s - pub fn account_roles(&self, id: &AccountId) -> impl Iterator { - self.world - .account_roles + // NOTE: have to use concreate type because don't want to capture lifetme of `id` + pub fn account_roles<'slf>(&'slf self, id: &AccountId) -> core::iter::Map, fn((&'slf RoleIdWithOwner, &'slf ())) -> &'slf RoleId> { + self.account_roles .range(RoleIdByAccountBounds::new(id)) - .map(|role| &role.role_id) + .map(|(role, _)| &role.role_id) } /// Return a set of all permission tokens granted to this account. @@ -433,10 +450,10 @@ impl WorldStateView { /// # Errors /// /// - if `account_id` is not found in `self` - pub fn account_permission_tokens( - &self, + pub fn account_permission_tokens<'slf>( + &'slf self, account_id: &AccountId, - ) -> Result, FindError> { + ) -> Result, FindError> { self.account(account_id)?; let mut tokens = self @@ -444,7 +461,7 @@ impl WorldStateView { .collect::>(); for role_id in self.account_roles(account_id) { - if let Some(role) = self.world.roles.get(role_id) { + if let Some(role) = self.roles.get(role_id) { tokens.extend(role.permissions.iter()); } } @@ -457,12 +474,11 @@ impl WorldStateView { /// # Errors /// /// - `account_id` is not found in `self.world`. 
- pub fn account_inherent_permission_tokens( - &self, + pub fn account_inherent_permission_tokens<'slf>( + &'slf self, account_id: &AccountId, - ) -> impl ExactSizeIterator { - self.world - .account_permission_tokens + ) -> std::collections::btree_set::Iter<'slf, PermissionToken> { + self.account_permission_tokens .get(account_id) .map_or_else(Default::default, std::collections::BTreeSet::iter) } @@ -474,21 +490,211 @@ impl WorldStateView { account: &AccountId, token: &PermissionToken, ) -> bool { - self.world - .account_permission_tokens + self.account_permission_tokens .get(account) .map_or(false, |permissions| permissions.contains(token)) } - /// Add [`permission`](PermissionToken) to the [`Account`] if the account does not have this permission yet. + // Asset-related methods + + /// Get `Asset` by its id /// - /// Return a Boolean value indicating whether or not the [`Account`] already had this permission. + /// # Errors + /// - No such [`Asset`] + /// - The [`Account`] with which the [`Asset`] is associated doesn't exist. + /// - The [`Domain`] with which the [`Account`] is associated doesn't exist. + pub fn asset(&self, id: &AssetId) -> Result { + self.map_account( + &id.account_id, + |account| -> Result { + account + .assets + .get(id) + .ok_or_else(|| QueryExecutionFail::from(FindError::Asset(id.clone()))) + .map(Clone::clone) + }, + )? + } + + // AssetDefinition-related methods + + /// Get `AssetDefinition` immutable view. + /// + /// # Errors + /// - Asset definition entry not found + pub fn asset_definition( + &self, + asset_id: &AssetDefinitionId, + ) -> Result { + self.domain(&asset_id.domain_id)? + .asset_definitions + .get(asset_id) + .ok_or_else(|| FindError::AssetDefinition(asset_id.clone())) + .map(Clone::clone) + } + + /// Get total amount of [`Asset`]. + /// + /// # Errors + /// - Asset definition not found + pub fn asset_total_amount( + &self, + definition_id: &AssetDefinitionId, + ) -> Result { + self.domain(&definition_id.domain_id)? 
+ .asset_total_quantities + .get(definition_id) + .ok_or_else(|| FindError::AssetDefinition(definition_id.clone())) + .copied() + } + + /// Get an immutable iterator over the [`PeerId`]s. + pub fn peers(&self) -> impl ExactSizeIterator { + self.trusted_peers_ids.iter() + } + + /// Get all `Parameter`s registered in the world. + pub fn parameters(&self) -> impl Iterator { + self.parameters.iter() + } + + /// Query parameter and convert it to a proper type + pub fn query_param, P: core::hash::Hash + Eq + ?Sized>( + &self, + param: &P, + ) -> Option + where + Parameter: Borrow

, + { + Parameters::get(&self.parameters, param) + .as_ref() + .map(|param| &*param.val) + .cloned() + .and_then(|param_val| param_val.try_into().ok()) + } + + /// Returns reference for trusted peer ids + #[inline] + pub fn peers_ids(&self) -> &PeersIds { + &self.trusted_peers_ids + } +} + +impl<'world> WorldBlock<'world> { + /// Create struct to apply transaction's changes + pub fn trasaction(&mut self) -> WorldTransaction<'_, 'world> { + WorldTransaction { + parameters: self.parameters.transaction(), + trusted_peers_ids: self.trusted_peers_ids.transaction(), + domains: self.domains.transaction(), + roles: self.roles.transaction(), + account_permission_tokens: self.account_permission_tokens.transaction(), + account_roles: self.account_roles.transaction(), + permission_token_schema: self.permission_token_schema.transaction(), + triggers: self.triggers.transaction(), + executor: self.executor.transaction(), + events_buffer: TransactionEventBuffer { + events_buffer: &mut self.events_buffer, + events_created_in_transaction: 0, + }, + } + } + + /// Commit block's changes + pub fn commit(self) { + // IMPORTANT!!! 
Commit fields in reverse order, this way consistent results are insured + self.executor.commit(); + self.triggers.commit(); + self.permission_token_schema.commit(); + self.account_roles.commit(); + self.account_permission_tokens.commit(); + self.roles.commit(); + self.domains.commit(); + self.trusted_peers_ids.commit(); + self.parameters.commit(); + } + + /// Convert [`Self`] to [`WorldSnapshot`] + pub fn to_snapshot(&self) -> WorldSnapshot<'_> { + WorldSnapshot { + parameters: &self.parameters, + trusted_peers_ids: &self.trusted_peers_ids, + domains: self.domains.to_snapshot(), + roles: self.roles.to_snapshot(), + account_permission_tokens: self.account_permission_tokens.to_snapshot(), + account_roles: self.account_roles.to_snapshot(), + permission_token_schema: &self.permission_token_schema, + triggers: &self.triggers, + executor: &self.executor, + } + } + + insert_world_read_only_methods! {} +} + +impl WorldTransaction<'_, '_> { + /// Apply transaction's changes + pub fn apply(mut self) { + self.executor.apply(); + self.triggers.apply(); + self.permission_token_schema.apply(); + self.account_roles.apply(); + self.account_permission_tokens.apply(); + self.roles.apply(); + self.domains.apply(); + self.trusted_peers_ids.apply(); + self.parameters.apply(); + self.events_buffer.events_created_in_transaction = 0; + } + + /// Convert [`Self`] to [`WorldSnapshot`] + pub fn to_snapshot(&self) -> WorldSnapshot<'_> { + WorldSnapshot { + parameters: &self.parameters, + trusted_peers_ids: &self.trusted_peers_ids, + domains: self.domains.to_snapshot(), + roles: self.roles.to_snapshot(), + account_permission_tokens: self.account_permission_tokens.to_snapshot(), + account_roles: self.account_roles.to_snapshot(), + permission_token_schema: &self.permission_token_schema, + triggers: &self.triggers, + executor: &self.executor, + } + } + + /// Get `Domain` with an ability to modify it. 
+ /// + /// # Errors + /// Fails if there is no domain + pub fn domain_mut(&mut self, id: &DomainId) -> Result<&mut Domain, FindError> { + let domain = self + .domains + .get_mut(id) + .ok_or_else(|| FindError::Domain(id.clone()))?; + Ok(domain) + } + + /// Get mutable reference to [`Account`] + /// + /// # Errors + /// Fail if domain or account not found + pub fn account_mut(&mut self, id: &AccountId) -> Result<&mut Account, FindError> { + self.domain_mut(&id.domain_id).and_then(move |domain| { + domain + .accounts + .get_mut(id) + .ok_or_else(|| FindError::Account(id.clone())) + }) + } + + /// Add [`permission`](PermissionToken) to the [`Account`] if the account does not have this permission yet. + /// + /// Return a Boolean value indicating whether or not the [`Account`] already had this permission. pub fn add_account_permission(&mut self, account: &AccountId, token: PermissionToken) -> bool { // `match` here instead of `map_or_else` to avoid cloning token into each closure - match self.world.account_permission_tokens.get_mut(account) { + match self.account_permission_tokens.get_mut(account) { None => { - self.world - .account_permission_tokens + self.account_permission_tokens .insert(account.clone(), BTreeSet::from([token])); true } @@ -509,317 +715,370 @@ impl WorldStateView { account: &AccountId, token: &PermissionToken, ) -> bool { - self.world - .account_permission_tokens + self.account_permission_tokens .get_mut(account) .map_or(false, |permissions| permissions.remove(token)) } - fn process_trigger( - &mut self, - id: &TriggerId, - action: &dyn LoadedActionTrait, - event: Event, - ) -> Result<()> { - use triggers::set::LoadedExecutable::*; - let authority = action.authority(); - - match action.executable() { - Instructions(instructions) => { - self.process_instructions(instructions.iter().cloned(), authority) - } - Wasm(LoadedWasm { module, .. 
}) => { - let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_configuration(self.config.wasm_runtime_config) - .with_engine(self.engine.clone()) // Cloning engine is cheap - .build()?; - wasm_runtime - .execute_trigger_module(self, id, authority.clone(), module, event) - .map_err(Into::into) - } - } + /// Get mutable reference to [`Asset`] + /// + /// # Errors + /// If domain, account or asset not found + pub fn asset_mut(&mut self, id: &AssetId) -> Result<&mut Asset, FindError> { + self.account_mut(&id.account_id).and_then(move |account| { + account + .assets + .get_mut(id) + .ok_or_else(|| FindError::Asset(id.clone())) + }) } - /// Process every trigger in `matched_ids` - fn process_triggers(&mut self) -> Result<(), Vec> { - // Cloning and clearing `self.matched_ids` so that `handle_` call won't deadlock - let matched_ids = self.world.triggers.extract_matched_ids(); - let mut succeed = Vec::::with_capacity(matched_ids.len()); - let mut errors = Vec::new(); - for (event, id) in matched_ids { - // Eliding the closure triggers a lifetime mismatch - #[allow(clippy::redundant_closure_for_method_calls)] - let action = self - .world - .triggers - .inspect_by_id(&id, |action| action.clone_and_box()); - if let Some(action) = action { - if let Repeats::Exactly(repeats) = action.repeats() { - if *repeats == 0 { - continue; - } - } - let event = match self.process_trigger(&id, &action, event) { - Ok(_) => { - succeed.push(id.clone()); - TriggerCompletedEvent::new(id, TriggerCompletedOutcome::Success) - } - Err(error) => { - let event = TriggerCompletedEvent::new( - id, - TriggerCompletedOutcome::Failure(error.to_string()), - ); - errors.push(error); - event - } - }; - self.events_buffer - .push(NotificationEvent::from(event).into()); - } + /// Get asset or inserts new with `default_asset_value`. + /// + /// # Errors + /// - There is no account with such name. 
+ #[allow(clippy::missing_panics_doc)] + pub fn asset_or_insert( + &mut self, + asset_id: AssetId, + default_asset_value: impl Into, + ) -> Result<&mut Asset, Error> { + // Check that asset definition exists + { + let asset_definition_id = &asset_id.definition_id; + let asset_definition_domain_id = &asset_id.definition_id.domain_id; + let asset_definition_domain = self + .domains + .get(asset_definition_domain_id) + .ok_or(FindError::Domain(asset_definition_domain_id.clone()))?; + asset_definition_domain + .asset_definitions + .get(asset_definition_id) + .ok_or(FindError::AssetDefinition(asset_definition_id.clone()))?; } - self.world.triggers.decrease_repeats(&succeed); - - errors.is_empty().then_some(()).ok_or(errors) - } + let account_id = &asset_id.account_id; + let account_domain = self + .domains + .get_mut(&asset_id.account_id.domain_id) + .ok_or(FindError::Domain(asset_id.account_id.domain_id.clone()))?; + let account = account_domain + .accounts + .get_mut(account_id) + .ok_or(FindError::Account(account_id.clone()))?; - fn process_executable(&mut self, executable: &Executable, authority: AccountId) -> Result<()> { - match executable { - Executable::Instructions(instructions) => { - self.process_instructions(instructions.iter().cloned(), &authority) - } - Executable::Wasm(bytes) => { - let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_configuration(self.config.wasm_runtime_config) - .with_engine(self.engine.clone()) // Cloning engine is cheap - .build()?; - wasm_runtime - .execute(self, authority, bytes) - .map_err(Into::into) - } - } + Ok(account.assets.entry(asset_id.clone()).or_insert_with(|| { + let asset = Asset::new(asset_id, default_asset_value.into()); + Self::emit_events_impl( + &mut self.triggers, + &mut self.events_buffer, + Some(AccountEvent::Asset(AssetEvent::Created(asset.clone()))), + ); + asset + })) } - fn process_instructions( + /// Get mutable reference to [`AssetDefinition`] + /// + /// # Errors + /// If domain or asset 
definition not found + pub fn asset_definition_mut( &mut self, - instructions: impl IntoIterator, - authority: &AccountId, - ) -> Result<()> { - instructions.into_iter().try_for_each(|instruction| { - instruction.execute(authority, self)?; - Ok::<_, eyre::Report>(()) + id: &AssetDefinitionId, + ) -> Result<&mut AssetDefinition, FindError> { + self.domain_mut(&id.domain_id).and_then(|domain| { + domain + .asset_definitions + .get_mut(id) + .ok_or_else(|| FindError::AssetDefinition(id.clone())) }) } - /// Apply `CommittedBlock` with changes in form of **Iroha Special - /// Instructions** to `self`. - /// - /// Order of execution: - /// 1) Transactions - /// 2) Triggers + /// Increase [`Asset`] total amount by given value /// /// # Errors + /// - [`AssetDefinition`], [`Domain`] not found + /// - Overflow + pub fn increase_asset_total_amount( + &mut self, + definition_id: &AssetDefinitionId, + increment: I, + ) -> Result<(), Error> + where + I: iroha_primitives::CheckedOp + Copy, + NumericValue: From + TryAsMut, + eyre::Error: From<>::Error>, + { + let domain = self.domain_mut(&definition_id.domain_id)?; + let asset_total_amount: &mut I = domain + .asset_total_quantities.get_mut(definition_id) + .expect("Asset total amount not being found is a bug: check `Register` to insert initial total amount") + .try_as_mut() + .map_err(eyre::Error::from) + .map_err(|e| Error::Conversion(e.to_string()))?; + *asset_total_amount = asset_total_amount + .checked_add(increment) + .ok_or(MathError::Overflow)?; + let asset_total_amount = *asset_total_amount; + + self.emit_events({ + Some(DomainEvent::AssetDefinition( + AssetDefinitionEvent::TotalQuantityChanged(AssetDefinitionTotalQuantityChanged { + asset_definition_id: definition_id.clone(), + total_amount: NumericValue::from(asset_total_amount), + }), + )) + }); + + Ok(()) + } + + /// Decrease [`Asset`] total amount by given value /// - /// - (RARE) if applying transaction after validation fails. 
This - /// scenario is rare, because the `tx` validation implies applying - /// instructions directly to a clone of the wsv. If this happens, - /// you likely have data corruption. - /// - If trigger execution fails - /// - If timestamp conversion to `u64` fails - #[cfg_attr( - not(debug_assertions), - deprecated(note = "This function is to be used in testing only. ") - )] - #[iroha_logger::log(skip_all, fields(block_height))] - pub fn apply(&mut self, block: &CommittedBlock) -> Result<()> { - self.execute_transactions(block)?; - debug!("All block transactions successfully executed"); + /// # Errors + /// - [`AssetDefinition`], [`Domain`] not found + /// - Not enough quantity + pub fn decrease_asset_total_amount( + &mut self, + definition_id: &AssetDefinitionId, + decrement: I, + ) -> Result<(), Error> + where + I: iroha_primitives::CheckedOp + Copy, + NumericValue: From + TryAsMut, + eyre::Error: From<>::Error>, + { + let domain = self.domain_mut(&definition_id.domain_id)?; + let asset_total_amount: &mut I = domain + .asset_total_quantities.get_mut(definition_id) + .expect("Asset total amount not being found is a bug: check `Register` to insert initial total amount") + .try_as_mut() + .map_err(eyre::Error::from) + .map_err(|e| Error::Conversion(e.to_string()))?; + *asset_total_amount = asset_total_amount + .checked_sub(decrement) + .ok_or(MathError::NotEnoughQuantity)?; + let asset_total_amount = *asset_total_amount; - self.apply_without_execution(block)?; + self.emit_events({ + Some(DomainEvent::AssetDefinition( + AssetDefinitionEvent::TotalQuantityChanged(AssetDefinitionTotalQuantityChanged { + asset_definition_id: definition_id.clone(), + total_amount: NumericValue::from(asset_total_amount), + }), + )) + }); Ok(()) } - /// Apply transactions without actually executing them. - /// It's assumed that block's transaction was already executed (as part of validation for example). 
- #[iroha_logger::log(skip_all, fields(block_height = block.payload().header.height))] - pub fn apply_without_execution(&mut self, block: &CommittedBlock) -> Result<()> { - let block_hash = block.hash(); - trace!(%block_hash, "Applying block"); + /// Set new permission token schema. + /// + /// Produces [`PermissionTokenSchemaUpdateEvent`]. + pub fn set_permission_token_schema(&mut self, schema: PermissionTokenSchema) { + let old_schema = std::mem::replace(self.permission_token_schema.get_mut(), schema.clone()); + self.emit_events(std::iter::once(WorldEvent::PermissionTokenSchemaUpdate( + PermissionTokenSchemaUpdateEvent { + old_schema, + new_schema: schema, + }, + ))) + } - let time_event = self.create_time_event(block); - self.events_buffer.push(Event::Time(time_event)); + /// Execute trigger with `trigger_id` as id and `authority` as owner + /// + /// Produces [`ExecuteTriggerEvent`]. + /// + /// Trigger execution time: + /// - If this method is called by ISI inside *transaction*, + /// then *trigger* will be executed on the **current** block + /// - If this method is called by ISI inside *trigger*, + /// then *trigger* will be executed on the **next** block + pub fn execute_trigger(&mut self, trigger_id: TriggerId, authority: &AccountId) { + let event = ExecuteTriggerEvent { + trigger_id, + authority: authority.clone(), + }; - let block_height = block.payload().header.height; - block - .payload() - .transactions - .iter() - .map(|tx| &tx.value) - .map(SignedTransaction::hash) - .for_each(|tx_hash| { - self.transactions.insert(tx_hash, block_height); - }); + self.triggers.handle_execute_trigger_event(event.clone()); + self.events_buffer.push(event.into()); + } - self.world.triggers.handle_time_event(time_event); + /// The function puts events produced by iterator into `events_buffer`. + /// Events should be produced in the order of expanding scope: from specific to general. + /// Example: account events before domain events. 
+ pub fn emit_events, T: Into>(&mut self, world_events: I) { + Self::emit_events_impl(&mut self.triggers, &mut self.events_buffer, world_events) + } - let res = self.process_triggers(); + /// Implementation of [`Self::emit_events()`]. + /// + /// Usable when you can't call [`Self::emit_events()`] due to mutable reference to self. + fn emit_events_impl, T: Into>( + triggers: &mut TriggerSet, + events_buffer: &mut TransactionEventBuffer<'_>, + world_events: I, + ) { + let data_events: SmallVec<[DataEvent; 3]> = world_events + .into_iter() + .map(Into::into) + .flat_map(WorldEvent::flatten) + .collect(); - if let Err(errors) = res { - warn!( - ?errors, - "The following errors have occurred during trigger execution" - ); + for event in data_events.iter() { + triggers.handle_data_event(event.clone()); } - self.block_hashes.push(block_hash); + events_buffer.extend(data_events.into_iter().map(Into::into)); + } - self.apply_parameters(); + insert_world_read_only_methods! {} +} - Ok(()) +impl TransactionEventBuffer<'_> { + fn push(&mut self, event: Event) { + self.events_created_in_transaction += 1; + self.events_buffer.push(event); } +} - fn apply_parameters(&mut self) { - use iroha_data_model::parameter::default::*; - macro_rules! update_params { - ($ident:ident, $($param:expr => $config:expr),+ $(,)?) => { - $(if let Some(param) = self.query_param($param) { - let $ident = &mut self.config; - $config = param; - })+ +impl Extend for TransactionEventBuffer<'_> { + fn extend>(&mut self, iter: T) { + let len_before = self.events_buffer.len(); + self.events_buffer.extend(iter); + let len_after = self.events_buffer.len(); + self.events_created_in_transaction += len_after - len_before; + } +} - }; - } - update_params! 
{ - config, - WSV_ASSET_METADATA_LIMITS => config.asset_metadata_limits, - WSV_ASSET_DEFINITION_METADATA_LIMITS => config.asset_definition_metadata_limits, - WSV_ACCOUNT_METADATA_LIMITS => config.account_metadata_limits, - WSV_DOMAIN_METADATA_LIMITS => config.domain_metadata_limits, - WSV_IDENT_LENGTH_LIMITS => config.ident_length_limits, - WASM_FUEL_LIMIT => config.wasm_runtime_config.fuel_limit, - WASM_MAX_MEMORY => config.wasm_runtime_config.max_memory, - TRANSACTION_LIMITS => config.transaction_limits, - } +impl Drop for TransactionEventBuffer<'_> { + fn drop(&mut self) { + // remove events produced by current transaction + self.events_buffer + .truncate(self.events_buffer.len() - self.events_created_in_transaction); } +} - /// Get transaction executor - pub fn transaction_executor(&self) -> TransactionExecutor { - TransactionExecutor::new(self.config.transaction_limits) +impl WorldView<'_> { + insert_world_read_only_methods! {} + + /// Convert [`Self`] to [`WorldSnapshot`] + pub fn to_snapshot(&self) -> WorldSnapshot<'_> { + WorldSnapshot { + parameters: &self.parameters, + trusted_peers_ids: &self.trusted_peers_ids, + domains: self.domains.to_snapshot(), + roles: self.roles.to_snapshot(), + account_permission_tokens: self.account_permission_tokens.to_snapshot(), + account_roles: self.account_roles.to_snapshot(), + permission_token_schema: &self.permission_token_schema, + triggers: &self.triggers, + executor: &self.executor, + } } +} - /// Get a reference to the latest block. Returns none if genesis is not committed. +impl WorldSnapshot<'_> { + insert_world_read_only_methods! {} +} + +impl State { + /// Construct [`State`] with given [`World`]. 
+ #[must_use] #[inline] - pub fn latest_block_ref(&self) -> Option> { - self.kura - .get_block_by_height(self.block_hashes.len() as u64) + pub fn new(world: World, kura: Arc, query_handle: LiveQueryStoreHandle) -> Self { + // Added to remain backward compatible with other code primary in tests + let config = ConfigurationProxy::default() + .build() + .expect("Wsv proxy always builds"); + Self::from_configuration(config, world, kura, query_handle) } - /// Create time event using previous and current blocks - fn create_time_event(&self, block: &CommittedBlock) -> TimeEvent { - let prev_interval = self.latest_block_ref().map(|latest_block| { - let header = &latest_block.payload().header; - - TimeInterval { - since: header.timestamp(), - length: header.consensus_estimation(), - } - }); + /// Construct [`State`] with specific [`Configuration`]. + #[inline] + pub fn from_configuration( + config: Configuration, + world: World, + kura: Arc, + query_handle: LiveQueryStoreHandle, + ) -> Self { + Self { + world, + config: Cell::new(config), + transactions: Storage::new(), + block_hashes: Cell::new(Vec::new()), + new_tx_amounts: Arc::new(Mutex::new(Vec::new())), + engine: wasm::create_engine(), + kura, + query_handle, + } + } - let interval = TimeInterval { - since: block.payload().header.timestamp(), - length: block.payload().header.consensus_estimation(), - }; + /// Create structure to execute block + pub fn block(&self, rollback_latest_block: bool) -> StateBlock<'_> { + StateBlock { + world: self.world.block(rollback_latest_block), + config: self.config.block(rollback_latest_block), + block_hashes: self.block_hashes.block(rollback_latest_block), + transactions: self.transactions.block(rollback_latest_block), + engine: &self.engine, + kura: &self.kura, + query_handle: &self.query_handle, + new_tx_amounts: &self.new_tx_amounts, + } + } - TimeEvent { - prev_interval, - interval, + /// Create point in time view of [`WorldState`] + pub fn view(&self) -> StateView<'_> { + 
StateView { + world: self.world.view(), + config: self.config.view(), + block_hashes: self.block_hashes.view(), + transactions: self.transactions.view(), + engine: &self.engine, + kura: &self.kura, + query_handle: &self.query_handle, + new_tx_amounts: &self.new_tx_amounts, } } +} - /// Execute `block` transactions and store their hashes as well as - /// `rejected_transactions` hashes - /// - /// # Errors - /// Fails if transaction instruction execution fails - fn execute_transactions(&mut self, block: &CommittedBlock) -> Result<()> { - // TODO: Should this block panic instead? - for tx in &block.payload().transactions { - if tx.error.is_none() { - self.process_executable( - tx.payload().instructions(), - tx.payload().authority.clone(), - )?; +macro_rules! world_state_read_only_methods { + ($($tt:tt)*) => { + macro_rules! insert_world_state_read_only_methods { + () => { + $($tt)* } } - - Ok(()) } +} - /// Get `Asset` by its id - /// - /// # Errors - /// - No such [`Asset`] - /// - The [`Account`] with which the [`Asset`] is associated doesn't exist. - /// - The [`Domain`] with which the [`Account`] is associated doesn't exist. - pub fn asset(&self, id: &AssetId) -> Result { - self.map_account( - &id.account_id, - |account| -> Result { - account - .assets - .get(id) - .ok_or_else(|| QueryExecutionFail::from(FindError::Asset(id.clone()))) - .map(Clone::clone) - }, - )? +// Read-only methods reused across `StateBlock`, `StateTransaction`, `StateView` +world_state_read_only_methods! { + // Block-related methods + + /// Get a reference to the latest block. Returns none if genesis is not committed. + #[inline] + pub fn latest_block_ref(&self) -> Option> { + self.kura + .get_block_by_height(self.block_hashes.len() as u64) } - /// Get asset or inserts new with `default_asset_value`. - /// - /// # Errors - /// - There is no account with such name. 
- #[allow(clippy::missing_panics_doc)] - pub fn asset_or_insert( - &mut self, - asset_id: AssetId, - default_asset_value: impl Into, - ) -> Result<&mut Asset, Error> { - // Check that asset definition exists - { - let asset_definition_id = &asset_id.definition_id; - let asset_definition_domain_id = &asset_id.definition_id.domain_id; - let asset_definition_domain = self - .world - .domains - .get(asset_definition_domain_id) - .ok_or(FindError::Domain(asset_definition_domain_id.clone()))?; - asset_definition_domain - .asset_definitions - .get(asset_definition_id) - .ok_or(FindError::AssetDefinition(asset_definition_id.clone()))?; - } + /// Return the hash of the latest block + pub fn latest_block_hash(&self) -> Option> { + self.block_hashes.iter().nth_back(0).copied() + } - let account_id = &asset_id.account_id; - let account_domain = self - .world - .domains - .get_mut(&asset_id.account_id.domain_id) - .ok_or(FindError::Domain(asset_id.account_id.domain_id.clone()))?; - let account = account_domain - .accounts - .get_mut(account_id) - .ok_or(FindError::Account(account_id.clone()))?; + /// Return the view change index of the latest block + pub fn latest_block_view_change_index(&self) -> u64 { + self.kura + .get_block_by_height(self.height()) + .map_or(0, |block| block.payload().header.view_change_index) + } - Ok(account.assets.entry(asset_id.clone()).or_insert_with(|| { - let asset = Asset::new(asset_id, default_asset_value.into()); - Self::emit_events_impl( - &mut self.world.triggers, - &mut self.events_buffer, - Some(AccountEvent::Asset(AssetEvent::Created(asset.clone()))), - ); - asset - })) + /// Return the hash of the block one before the latest block + pub fn previous_block_hash(&self) -> Option> { + self.block_hashes.iter().nth_back(1).copied() } /// Load all blocks in the block chain from disc @@ -850,17 +1109,6 @@ impl WorldStateView { ) } - /// Return mutable reference to the [`World`] - pub fn world_mut(&mut self) -> &mut World { - &mut self.world - } - - 
/// Returns reference for trusted peer ids - #[inline] - pub fn peers_ids(&self) -> &PeersIds { - &self.world.trusted_peers_ids - } - /// Return an iterator over blockchain block hashes starting with the block of the given `height` pub fn block_hashes_from_height(&self, height: usize) -> Vec> { self.block_hashes @@ -870,83 +1118,16 @@ impl WorldStateView { .collect() } - /// Get `Domain` without an ability to modify it. - /// - /// # Errors - /// Fails if there is no domain - pub fn domain<'wsv>(&'wsv self, id: &DomainId) -> Result<&'wsv Domain, FindError> { - let domain = self - .world - .domains - .get(id) - .ok_or_else(|| FindError::Domain(id.clone()))?; - Ok(domain) - } - - /// Get `Domain` with an ability to modify it. - /// - /// # Errors - /// Fails if there is no domain - pub fn domain_mut(&mut self, id: &DomainId) -> Result<&mut Domain, FindError> { - let domain = self - .world - .domains - .get_mut(id) - .ok_or_else(|| FindError::Domain(id.clone()))?; - Ok(domain) - } - - /// Returns reference for domains map - #[inline] - pub fn domains(&self) -> &DomainsMap { - &self.world.domains - } - - /// Get `Domain` and pass it to closure. - /// - /// # Errors - /// Fails if there is no domain - pub fn map_domain<'wsv, T>( - &'wsv self, - id: &DomainId, - f: impl FnOnce(&'wsv Domain) -> T, - ) -> Result { - let domain = self.domain(id)?; - let value = f(domain); - Ok(value) - } - - /// Get all roles - #[inline] - pub fn roles(&self) -> &crate::RolesMap { - &self.world.roles - } - - /// Get all permission token definitions - #[inline] - pub fn permission_token_schema(&self) -> &crate::PermissionTokenSchema { - &self.world.permission_token_schema - } - - /// Construct [`WorldStateView`] with specific [`Configuration`]. 
+ /// Height of blockchain #[inline] - pub fn from_configuration( - config: Configuration, - world: World, - kura: Arc, - query_handle: LiveQueryStoreHandle, - ) -> Self { - Self { - world, - config, - transactions: HashMap::new(), - block_hashes: Vec::new(), - events_buffer: Vec::new(), - new_tx_amounts: Arc::new(Mutex::new(Vec::new())), - engine: wasm::create_engine(), - kura, - query_handle, - } + pub fn height(&self) -> u64 { + self.block_hashes.len() as u64 + } + + /// Find a [`SignedBlock`] by hash. + pub fn block_with_tx(&self, hash: &HashOf) -> Option> { + let height = *self.transactions.get(hash)?; + self.kura.get_block_by_height(height) } /// Returns [`Some`] milliseconds since the genesis block was @@ -971,338 +1152,339 @@ impl WorldStateView { /// Check if this [`SignedTransaction`] is already committed or rejected. #[inline] pub fn has_transaction(&self, hash: HashOf) -> bool { - self.transactions.contains_key(&hash) + self.transactions.get(&hash).is_some() } - /// Height of blockchain - #[inline] - pub fn height(&self) -> u64 { - self.block_hashes.len() as u64 + /// Get transaction executor + pub fn transaction_executor(&self) -> TransactionExecutor { + TransactionExecutor::new(self.config.transaction_limits) } +} - /// Return the hash of the latest block - pub fn latest_block_hash(&self) -> Option> { - self.block_hashes.iter().nth_back(0).copied() +impl<'state> StateBlock<'state> { + /// Create struct to store changes during transaction or trigger execution + pub fn transaction(&mut self) -> StateTransaction<'_, 'state> { + StateTransaction { + world: self.world.trasaction(), + config: self.config.transaction(), + block_hashes: self.block_hashes.transaction(), + transactions: self.transactions.transaction(), + engine: self.engine, + kura: self.kura, + query_handle: self.query_handle, + new_tx_amounts: self.new_tx_amounts, + } } - /// Return the view change index of the latest block - pub fn latest_block_view_change_index(&self) -> u64 { - self.kura - 
 .get_block_by_height(self.height()) - .map_or(0, |block| block.payload().header.view_change_index) + /// Commit changes aggregated during application of block + pub fn commit(self) { + self.transactions.commit(); + self.block_hashes.commit(); + self.config.commit(); + self.world.commit(); + } - /// Return the hash of the block one before the latest block - pub fn previous_block_hash(&self) -> Option> { - self.block_hashes.iter().nth_back(1).copied() + /// Convert [`Self`] to [`StateSnapshot`] + pub fn to_snapshot(&self) -> StateSnapshot<'_> { + StateSnapshot { + world: self.world.to_snapshot(), + config: &self.config, + block_hashes: &self.block_hashes, + transactions: self.transactions.to_snapshot(), + engine: self.engine, + kura: self.kura, + query_handle: self.query_handle, + new_tx_amounts: self.new_tx_amounts, + } } - /// Get `Account` and pass it to closure. + /// Commit `CommittedBlock` with changes in form of **Iroha Special + /// Instructions** to `self`. /// - /// # Errors - /// Fails if there is no domain or account - pub fn map_account<'wsv, T>( - &'wsv self, - id: &AccountId, - f: impl FnOnce(&'wsv Account) -> T, - ) -> Result { - let domain = self.domain(&id.domain_id)?; - let account = domain - .accounts - .get(id) - .ok_or(FindError::Account(id.clone()))?; - Ok(f(account)) - } - - /// Get `Account` and return reference to it. 
+ /// Order of execution: + /// 1) Transactions + /// 2) Triggers /// /// # Errors - /// Fails if there is no domain or account - pub fn account(&self, id: &AccountId) -> Result<&Account, FindError> { - self.domain(&id.domain_id).and_then(|domain| { - domain - .accounts - .get(id) - .ok_or_else(|| FindError::Account(id.clone())) - }) - } - - /// Get mutable reference to [`Account`] /// - /// # Errors - /// Fail if domain or account not found - pub fn account_mut(&mut self, id: &AccountId) -> Result<&mut Account, FindError> { - self.domain_mut(&id.domain_id).and_then(move |domain| { - domain - .accounts - .get_mut(id) - .ok_or_else(|| FindError::Account(id.clone())) - }) - } + /// - (RARE) if applying transaction after validation fails. This + /// scenario is rare, because the `tx` validation implies applying + /// instructions directly to a clone of the wsv. If this happens, + /// you likely have data corruption. + /// - If trigger execution fails + /// - If timestamp conversion to `u64` fails + #[cfg_attr( + not(debug_assertions), + deprecated(note = "This function is to be used in testing only. 
") + )] + #[iroha_logger::log(skip_all, fields(block_height))] + pub fn apply(&mut self, block: &CommittedBlock) -> Result<()> { + self.execute_transactions(block)?; + debug!("All block transactions successfully executed"); - /// Get mutable reference to [`Asset`] - /// - /// # Errors - /// If domain, account or asset not found - pub fn asset_mut(&mut self, id: &AssetId) -> Result<&mut Asset, FindError> { - self.account_mut(&id.account_id).and_then(move |account| { - account - .assets - .get_mut(id) - .ok_or_else(|| FindError::Asset(id.clone())) - }) + self.apply_without_execution(block)?; + + Ok(()) } - /// Get mutable reference to [`AssetDefinition`] + /// Execute `block` transactions and store their hashes as well as + /// `rejected_transactions` hashes /// /// # Errors - /// If domain or asset definition not found - pub fn asset_definition_mut( - &mut self, - id: &AssetDefinitionId, - ) -> Result<&mut AssetDefinition, FindError> { - self.domain_mut(&id.domain_id).and_then(|domain| { - domain - .asset_definitions - .get_mut(id) - .ok_or_else(|| FindError::AssetDefinition(id.clone())) - }) - } - - /// Get an immutable iterator over the [`PeerId`]s. - pub fn peers(&self) -> impl ExactSizeIterator { - self.world.trusted_peers_ids.iter() - } + /// Fails if transaction instruction execution fails + fn execute_transactions(&mut self, block: &CommittedBlock) -> Result<()> { + // TODO: Should this block panic instead? + for tx in &block.payload().transactions { + if tx.error.is_none() { + // Execute every tx in it's own transaction + let mut transaction = self.transaction(); + transaction.process_executable( + tx.payload().instructions(), + tx.payload().authority.clone(), + )?; + transaction.apply(); + } + } - /// Get all `Parameter`s registered in the world. 
- pub fn parameters(&self) -> impl ExactSizeIterator { - self.world.parameters.iter() + Ok(()) } - /// Query parameter and convert it to a proper type - pub fn query_param, P: core::hash::Hash + Eq + ?Sized>( - &self, - param: &P, - ) -> Option - where - Parameter: Borrow

, - { - self.world - .parameters - .get(param) - .as_ref() - .map(|param| &*param.val) - .cloned() - .and_then(|param_val| param_val.try_into().ok()) - } + /// Apply transactions without actually executing them. + /// It's assumed that block's transaction was already executed (as part of validation for example). + #[iroha_logger::log(skip_all, fields(block_height = block.payload().header.height))] + pub fn apply_without_execution(&mut self, block: &CommittedBlock) -> Result<()> { + let block_hash = block.hash(); + trace!(%block_hash, "Applying block"); - /// Get `AssetDefinition` immutable view. - /// - /// # Errors - /// - Asset definition entry not found - pub fn asset_definition( - &self, - asset_id: &AssetDefinitionId, - ) -> Result { - self.domain(&asset_id.domain_id)? - .asset_definitions - .get(asset_id) - .ok_or_else(|| FindError::AssetDefinition(asset_id.clone())) - .map(Clone::clone) - } + let time_event = self.create_time_event(block); + self.world.events_buffer.push(Event::Time(time_event)); - /// Get total amount of [`Asset`]. - /// - /// # Errors - /// - Asset definition not found - pub fn asset_total_amount( - &self, - definition_id: &AssetDefinitionId, - ) -> Result { - self.domain(&definition_id.domain_id)? 
- .asset_total_quantities - .get(definition_id) - .ok_or_else(|| FindError::AssetDefinition(definition_id.clone())) - .copied() - } + let block_height = block.payload().header.height; + block + .payload() + .transactions + .iter() + .map(|tx| &tx.value) + .map(SignedTransaction::hash) + .for_each(|tx_hash| { + self.transactions.insert(tx_hash, block_height); + }); - /// Increase [`Asset`] total amount by given value - /// - /// # Errors - /// - [`AssetDefinition`], [`Domain`] not found - /// - Overflow - pub fn increase_asset_total_amount( - &mut self, - definition_id: &AssetDefinitionId, - increment: I, - ) -> Result<(), Error> - where - I: iroha_primitives::CheckedOp + Copy, - NumericValue: From + TryAsMut, - eyre::Error: From<>::Error>, - { - let domain = self.domain_mut(&definition_id.domain_id)?; - let asset_total_amount: &mut I = domain - .asset_total_quantities.get_mut(definition_id) - .expect("Asset total amount not being found is a bug: check `Register` to insert initial total amount") - .try_as_mut() - .map_err(eyre::Error::from) - .map_err(|e| Error::Conversion(e.to_string()))?; - *asset_total_amount = asset_total_amount - .checked_add(increment) - .ok_or(MathError::Overflow)?; - let asset_total_amount = *asset_total_amount; + self.world.triggers.handle_time_event(time_event); - self.emit_events({ - Some(DomainEvent::AssetDefinition( - AssetDefinitionEvent::TotalQuantityChanged(AssetDefinitionTotalQuantityChanged { - asset_definition_id: definition_id.clone(), - total_amount: NumericValue::from(asset_total_amount), - }), - )) - }); + let res = self.process_triggers(); - Ok(()) - } + if let Err(errors) = res { + warn!( + ?errors, + "The following errors have occurred during trigger execution" + ); + } - /// Decrease [`Asset`] total amount by given value - /// - /// # Errors - /// - [`AssetDefinition`], [`Domain`] not found - /// - Not enough quantity - pub fn decrease_asset_total_amount( - &mut self, - definition_id: &AssetDefinitionId, - decrement: I, - 
) -> Result<(), Error> - where - I: iroha_primitives::CheckedOp + Copy, - NumericValue: From + TryAsMut, - eyre::Error: From<>::Error>, - { - let domain = self.domain_mut(&definition_id.domain_id)?; - let asset_total_amount: &mut I = domain - .asset_total_quantities.get_mut(definition_id) - .expect("Asset total amount not being found is a bug: check `Register` to insert initial total amount") - .try_as_mut() - .map_err(eyre::Error::from) - .map_err(|e| Error::Conversion(e.to_string()))?; - *asset_total_amount = asset_total_amount - .checked_sub(decrement) - .ok_or(MathError::NotEnoughQuantity)?; - let asset_total_amount = *asset_total_amount; + self.block_hashes.push(block_hash); - self.emit_events({ - Some(DomainEvent::AssetDefinition( - AssetDefinitionEvent::TotalQuantityChanged(AssetDefinitionTotalQuantityChanged { - asset_definition_id: definition_id.clone(), - total_amount: NumericValue::from(asset_total_amount), - }), - )) - }); + self.apply_parameters(); Ok(()) } - /// Find a [`SignedBlock`] by hash. - pub fn block_with_tx(&self, hash: &HashOf) -> Option> { - let height = *self.transactions.get(hash)?; - self.kura.get_block_by_height(height) - } + /// Create time event using previous and current blocks + fn create_time_event(&self, block: &CommittedBlock) -> TimeEvent { + let prev_interval = self.latest_block_ref().map(|latest_block| { + let header = &latest_block.payload().header; - /// Get an immutable view of the `World`. 
- #[must_use] - #[inline] - pub fn world(&self) -> &World { - &self.world + TimeInterval { + since: header.timestamp(), + length: header.consensus_estimation(), + } + }); + + let interval = TimeInterval { + since: block.payload().header.timestamp(), + length: block.payload().header.consensus_estimation(), + }; + + TimeEvent { + prev_interval, + interval, + } } - /// Returns reference for triggers - #[inline] - pub fn triggers(&self) -> &TriggerSet { - &self.world.triggers + /// Process every trigger in `matched_ids` + fn process_triggers(&mut self) -> Result<(), Vec> { + // Cloning and clearing `self.matched_ids` so that `handle_` call won't deadlock + let matched_ids = self.world.triggers.extract_matched_ids(); + let mut succeed = Vec::::with_capacity(matched_ids.len()); + let mut errors = Vec::new(); + for (event, id) in matched_ids { + // Eliding the closure triggers a lifetime mismatch + #[allow(clippy::redundant_closure_for_method_calls)] + let action = self + .world + .triggers + .inspect_by_id(&id, |action| action.clone_and_box()); + if let Some(action) = action { + if let Repeats::Exactly(repeats) = action.repeats() { + if *repeats == 0 { + continue; + } + } + // Execute every trigger in it's own transaction + let event = { + let mut transaction = self.transaction(); + match transaction.process_trigger(&id, &action, event) { + Ok(_) => { + transaction.apply(); + succeed.push(id.clone()); + TriggerCompletedEvent::new(id, TriggerCompletedOutcome::Success) + } + Err(error) => { + let event = TriggerCompletedEvent::new( + id, + TriggerCompletedOutcome::Failure(error.to_string()), + ); + errors.push(error); + event + } + } + }; + self.world + .events_buffer + .push(NotificationEvent::from(event).into()); + } + } + + self.world.triggers.decrease_repeats(&succeed); + + errors.is_empty().then_some(()).ok_or(errors) } - /// Return mutable reference for triggers - #[inline] - pub fn triggers_mut(&mut self) -> &mut TriggerSet { - &mut self.world.triggers + fn 
apply_parameters(&mut self) { + use iroha_data_model::parameter::default::*; + macro_rules! update_params { + ($ident:ident, $($param:expr => $config:expr),+ $(,)?) => { + $(if let Some(param) = self.world.query_param($param) { + let $ident = &mut self.config; + $config = param; + })+ + + }; + } + update_params! { + config, + WSV_ASSET_METADATA_LIMITS => config.asset_metadata_limits, + WSV_ASSET_DEFINITION_METADATA_LIMITS => config.asset_definition_metadata_limits, + WSV_ACCOUNT_METADATA_LIMITS => config.account_metadata_limits, + WSV_DOMAIN_METADATA_LIMITS => config.domain_metadata_limits, + WSV_IDENT_LENGTH_LIMITS => config.ident_length_limits, + WASM_FUEL_LIMIT => config.wasm_runtime_config.fuel_limit, + WASM_MAX_MEMORY => config.wasm_runtime_config.max_memory, + TRANSACTION_LIMITS => config.transaction_limits, + } } - /// Execute trigger with `trigger_id` as id and `authority` as owner - /// - /// Produces [`ExecuteTriggerEvent`]. - /// - /// Trigger execution time: - /// - If this method is called by ISI inside *transaction*, - /// then *trigger* will be executed on the **current** block - /// - If this method is called by ISI inside *trigger*, - /// then *trigger* will be executed on the **next** block - pub fn execute_trigger(&mut self, trigger_id: TriggerId, authority: &AccountId) { - let event = ExecuteTriggerEvent { - trigger_id, - authority: authority.clone(), - }; + insert_world_state_read_only_methods! 
{} +} - self.world - .triggers - .handle_execute_trigger_event(event.clone()); - self.events_buffer.push(event.into()); +impl StateTransaction<'_, '_> { + /// Apply transaction making it's changes visible + pub fn apply(self) { + self.transactions.apply(); + self.block_hashes.apply(); + self.config.apply(); + self.world.apply(); + } + + /// Convert [`Self`] to [`WorldStateSnapshot`] + pub fn to_snapshot(&self) -> StateSnapshot<'_> { + StateSnapshot { + world: self.world.to_snapshot(), + config: &self.config, + block_hashes: &self.block_hashes, + transactions: self.transactions.to_snapshot(), + engine: self.engine, + kura: self.kura, + query_handle: self.query_handle, + new_tx_amounts: self.new_tx_amounts, + } } - /// Get [`Executor`]. - pub fn executor(&self) -> &Executor { - &self.world.executor + fn process_executable(&mut self, executable: &Executable, authority: AccountId) -> Result<()> { + match executable { + Executable::Instructions(instructions) => { + self.process_instructions(instructions.iter().cloned(), &authority) + } + Executable::Wasm(bytes) => { + let mut wasm_runtime = wasm::RuntimeBuilder::::new() + .with_configuration(self.config.wasm_runtime_config) + .with_engine(self.engine.clone()) // Cloning engine is cheap + .build()?; + wasm_runtime + .execute(self, authority, bytes) + .map_err(Into::into) + } + } } - /// The function puts events produced by iterator into `events_buffer`. - /// Events should be produced in the order of expanding scope: from specific to general. - /// Example: account events before domain events. 
- pub fn emit_events, T: Into>(&mut self, world_events: I) { - Self::emit_events_impl( - &mut self.world.triggers, - &mut self.events_buffer, - world_events, - ) + fn process_instructions( + &mut self, + instructions: impl IntoIterator, + authority: &AccountId, + ) -> Result<()> { + instructions.into_iter().try_for_each(|instruction| { + instruction.execute(authority, self)?; + Ok::<_, eyre::Report>(()) + }) } - /// Implementation of [`Self::emit_events()`]. - /// - /// Usable when you can't call [`Self::emit_events()`] due to mutable reference to self. - fn emit_events_impl, T: Into>( - triggers: &mut TriggerSet, - events_buffer: &mut Vec, - world_events: I, - ) { - let data_events: SmallVec<[DataEvent; 3]> = world_events - .into_iter() - .map(Into::into) - .flat_map(WorldEvent::flatten) - .collect(); + fn process_trigger( + &mut self, + id: &TriggerId, + action: &dyn LoadedActionTrait, + event: Event, + ) -> Result<()> { + use triggers::set::LoadedExecutable::*; + let authority = action.authority(); - for event in data_events.iter() { - triggers.handle_data_event(event.clone()); + match action.executable() { + Instructions(instructions) => { + self.process_instructions(instructions.iter().cloned(), authority) + } + Wasm(LoadedWasm { module, .. }) => { + let mut wasm_runtime = wasm::RuntimeBuilder::::new() + .with_configuration(self.config.wasm_runtime_config) + .with_engine(self.engine.clone()) // Cloning engine is cheap + .build()?; + wasm_runtime + .execute_trigger_module(self, id, authority.clone(), module, event) + .map_err(Into::into) + } } - events_buffer.extend(data_events.into_iter().map(Into::into)); } - /// Set new permission token schema. - /// - /// Produces [`PermissionTokenSchemaUpdateEvent`]. 
- pub fn set_permission_token_schema(&mut self, schema: PermissionTokenSchema) { - let old_schema = std::mem::replace(&mut self.world.permission_token_schema, schema.clone()); - self.emit_events(std::iter::once(WorldEvent::PermissionTokenSchemaUpdate( - PermissionTokenSchemaUpdateEvent { - old_schema, - new_schema: schema, - }, - ))) - } + insert_world_state_read_only_methods! {} +} - /// Get reference to the [`LiveQueryStoreHandle`]. - pub fn query_handle(&self) -> &LiveQueryStoreHandle { - &self.query_handle +impl StateView<'_> { + /// Convert [`Self`] to [`WorldStateSnapshot`] + pub fn to_snapshot(&self) -> StateSnapshot<'_> { + StateSnapshot { + world: self.world.to_snapshot(), + config: &self.config, + block_hashes: &self.block_hashes, + transactions: self.transactions.to_snapshot(), + engine: self.engine, + kura: self.kura, + query_handle: self.query_handle, + new_tx_amounts: self.new_tx_amounts, + } } + + insert_world_state_read_only_methods! {} +} + +impl StateSnapshot<'_> { + insert_world_state_read_only_methods! 
{} } /// Bounds for `range` queries @@ -1369,6 +1551,270 @@ mod range_bounds { } } +pub(crate) mod deserialize { + use storage::serde::CellSeeded; + + use super::*; + + // Loader for [`Set`] + #[derive(Clone, Copy)] + pub struct WasmSeed<'e, T> { + pub engine: &'e wasmtime::Engine, + _marker: PhantomData, + } + + impl<'e, T> WasmSeed<'e, T> { + pub fn cast(&self) -> WasmSeed<'e, U> { + WasmSeed { + engine: self.engine, + _marker: PhantomData, + } + } + } + + impl<'e, 'de, T> DeserializeSeed<'de> for WasmSeed<'e, Option> + where + WasmSeed<'e, T>: DeserializeSeed<'de, Value = T>, + { + type Value = Option; + + fn deserialize(self, deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct OptionVisitor<'l, T> { + loader: WasmSeed<'l, T>, + _marker: PhantomData, + } + + impl<'e, 'de, T> Visitor<'de> for OptionVisitor<'e, T> + where + WasmSeed<'e, T>: DeserializeSeed<'de, Value = T>, + { + type Value = Option; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("struct World") + } + + fn visit_none(self) -> Result + where + E: serde::de::Error, + { + Ok(None) + } + + fn visit_some(self, deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Some(self.loader.deserialize(deserializer)).transpose() + } + } + + let visitor = OptionVisitor { + loader: self.cast::(), + _marker: PhantomData, + }; + deserializer.deserialize_option(visitor) + } + } + + impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { + type Value = World; + + fn deserialize(self, deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct WorldVisitor<'l> { + loader: &'l WasmSeed<'l, World>, + } + + impl<'de> Visitor<'de> for WorldVisitor<'_> { + type Value = World; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("struct World") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'de>, + { + let mut parameters = None; + let mut 
trusted_peers_ids = None; + let mut domains = None; + let mut roles = None; + let mut account_permission_tokens = None; + let mut account_roles = None; + let mut permission_token_schema = None; + let mut triggers = None; + let mut executor = None; + + while let Some(key) = map.next_key::()? { + match key.as_str() { + "parameters" => { + parameters = Some(map.next_value()?); + } + "trusted_peers_ids" => { + trusted_peers_ids = Some(map.next_value()?); + } + "domains" => { + domains = Some(map.next_value()?); + } + "roles" => { + roles = Some(map.next_value()?); + } + "account_permission_tokens" => { + account_permission_tokens = Some(map.next_value()?); + } + "account_roles" => { + account_roles = Some(map.next_value()?); + } + "permission_token_schema" => { + permission_token_schema = Some(map.next_value()?); + } + "triggers" => { + triggers = Some(map.next_value_seed(CellSeeded { + seed: self.loader.cast::(), + })?); + } + "executor" => { + executor = Some(map.next_value_seed(CellSeeded { + seed: self.loader.cast::(), + })?); + } + _ => { /* Skip unknown fields */ } + } + } + + Ok(World { + parameters: parameters + .ok_or_else(|| serde::de::Error::missing_field("parameters"))?, + trusted_peers_ids: trusted_peers_ids + .ok_or_else(|| serde::de::Error::missing_field("trusted_peers_ids"))?, + domains: domains + .ok_or_else(|| serde::de::Error::missing_field("domains"))?, + roles: roles.ok_or_else(|| serde::de::Error::missing_field("roles"))?, + account_permission_tokens: account_permission_tokens.ok_or_else(|| { + serde::de::Error::missing_field("account_permission_tokens") + })?, + account_roles: account_roles + .ok_or_else(|| serde::de::Error::missing_field("account_roles"))?, + permission_token_schema: permission_token_schema.ok_or_else(|| { + serde::de::Error::missing_field("permission_token_schema") + })?, + triggers: triggers + .ok_or_else(|| serde::de::Error::missing_field("triggers"))?, + executor: executor + .ok_or_else(|| 
serde::de::Error::missing_field("executor"))?, + }) + } + } + + deserializer.deserialize_struct( + "World", + &[ + "parameters", + "trusted_peers_ids", + "domains", + "roles", + "account_permission_tokens", + "account_roles", + "permission_token_schema", + "triggers", + "executor", + ], + WorldVisitor { loader: &self }, + ) + } + } + + /// Context necessary for deserializing [`State`] + pub struct KuraSeed { + /// Kura subsystem reference + pub kura: Arc, + /// Handle to the [`LiveQueryStore`](crate::query::store::LiveQueryStore). + pub query_handle: LiveQueryStoreHandle, + } + + impl<'de> DeserializeSeed<'de> for KuraSeed { + type Value = State; + + fn deserialize(self, deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct StateVisitor { + loader: KuraSeed, + } + + impl<'de> Visitor<'de> for StateVisitor { + type Value = State; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("struct WorldState") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'de>, + { + let mut world = None; + let mut config = None; + let mut block_hashes = None; + let mut transactions = None; + + let engine = wasm::create_engine(); + + let wasm_seed: WasmSeed<()> = WasmSeed { + engine: &engine, + _marker: PhantomData, + }; + + while let Some(key) = map.next_key::()? 
{ + match key.as_str() { + "world" => { + world = Some(map.next_value_seed(wasm_seed.cast::())?); + } + "config" => { + config = Some(map.next_value()?); + } + "block_hashes" => { + block_hashes = Some(map.next_value()?); + } + "transactions" => { + transactions = Some(map.next_value()?); + } + _ => { /* Skip unknown fields */ } + } + } + + Ok(State { + world: world.ok_or_else(|| serde::de::Error::missing_field("world"))?, + config: config.ok_or_else(|| serde::de::Error::missing_field("config"))?, + block_hashes: block_hashes + .ok_or_else(|| serde::de::Error::missing_field("block_hashes"))?, + transactions: transactions + .ok_or_else(|| serde::de::Error::missing_field("transactions"))?, + kura: self.loader.kura, + query_handle: self.loader.query_handle, + engine, + new_tx_amounts: Arc::new(Mutex::new(Vec::new())), + }) + } + } + + deserializer.deserialize_struct( + "WorldState", + &["world", "config", "block_hashes", "transactions"], + StateVisitor { loader: self }, + ) + } + } +} + #[cfg(test)] mod tests { use iroha_primitives::unique_vec::UniqueVec; @@ -1387,7 +1833,8 @@ mod tests { let block = ValidBlock::new_dummy().commit(&topology).unwrap(); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(World::default(), kura, query_handle); + let wsv = State::new(World::default(), kura, query_handle); + let mut wsv = wsv.block(false); let mut block_hashes = vec![]; for i in 1..=BLOCK_CNT { @@ -1414,7 +1861,8 @@ mod tests { let block = ValidBlock::new_dummy().commit(&topology).unwrap(); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(World::default(), kura.clone(), query_handle); + let wsv = State::new(World::default(), kura.clone(), query_handle); + let mut wsv = wsv.block(false); for i in 1..=BLOCK_CNT { let mut block = block.clone(); diff --git a/telemetry/derive/src/lib.rs b/telemetry/derive/src/lib.rs index 
779b3cdba0b..5946a81ba97 100644 --- a/telemetry/derive/src/lib.rs +++ b/telemetry/derive/src/lib.rs @@ -17,13 +17,12 @@ use syn::{ const TOTAL_STR: &str = "total"; #[cfg(feature = "metric-instrumentation")] const SUCCESS_STR: &str = "success"; -const WSV_STRING: &str = "WorldStateView"; fn type_has_metrics_field(ty: &Type) -> bool { match ty { // This may seem fragile, but it isn't. We use the same convention // everywhere in the code base, and if you follow `CONTRIBUTING.md` - // you'll likely have `use iroha_core::WorldStateView` + // you'll likely have `use iroha_core::{StateTransaction, StateSnapshot}` // somewhere. If you don't, you're violating the `CONTRIBUTING.md` in // more than one way. Type::Path(pth) => { @@ -32,7 +31,7 @@ fn type_has_metrics_field(ty: &Type) -> bool { .last() .expect("Should have at least one segment") .ident; - *type_name == WSV_STRING + *type_name == "StateSnapshot" || *type_name == "StateTransaction" } _ => false, } @@ -42,7 +41,7 @@ fn type_has_metrics_field(ty: &Type) -> bool { /// metrics. /// /// # Errors -/// If no argument is of type `WorldStateView`. +/// If no argument is of type `StateTransaction` of `StateSnapshot`. fn arg_metrics(input: &Punctuated) -> Result> { input .iter() @@ -137,11 +136,11 @@ impl ToTokens for MetricSpec { /// # Examples /// /// ```rust -/// use iroha_core::wsv::{World, WorldStateView}; +/// use iroha_core::wsv::{World, StateTransaction}; /// use iroha_telemetry_derive::metrics; /// /// #[metrics(+"test_query", "another_test_query_without_timing")] -/// fn execute(wsv: &WorldStateView) -> Result<(), ()> { +/// fn execute(wsv: &StateTransaction) -> Result<(), ()> { /// Ok(()) /// } /// ``` @@ -185,14 +184,18 @@ pub fn metrics(attr: TokenStream, item: TokenStream) -> TokenStream { if sig.inputs.is_empty() { abort!( sig, - "Function must have at least one argument of type `WorldStateView`." + "Function must have at least one argument of type `StateTransaction` or `StateSnapshot`." 
); } let _specs = parse_macro_input!(attr as MetricSpecs); // Again this may seem fragile, but if we move the metrics from - // the `WorldStateView`, we'd need to refactor many things anyway - let _metric_arg_ident = arg_metrics(&sig.inputs) - .unwrap_or_else(|args| abort!(args, "At least one argument must be a `WorldStateView`.")); + // the state, we'd need to refactor many things anyway + let _metric_arg_ident = arg_metrics(&sig.inputs).unwrap_or_else(|args| { + abort!( + args, + "At least one argument must be a `StateTransaction` or `StateSnapshot`." + ) + }); #[cfg(feature = "metric-instrumentation")] let res = { diff --git a/telemetry/derive/tests/ui_fail/args_no_wsv.stderr b/telemetry/derive/tests/ui_fail/args_no_wsv.stderr index 4aa2e1da1c3..a36e7a233b7 100644 --- a/telemetry/derive/tests/ui_fail/args_no_wsv.stderr +++ b/telemetry/derive/tests/ui_fail/args_no_wsv.stderr @@ -1,4 +1,4 @@ -error: At least one argument must be a `WorldStateView`. +error: At least one argument must be a `StateTransaction` or `StateSnapshot`. 
--> tests/ui_fail/args_no_wsv.rs:4:12 | 4 | fn execute(_wsv: &World) -> Result<(), ()> { diff --git a/telemetry/derive/tests/ui_fail/bare_spec.rs b/telemetry/derive/tests/ui_fail/bare_spec.rs index bb6029fddf1..fc79eeffa20 100644 --- a/telemetry/derive/tests/ui_fail/bare_spec.rs +++ b/telemetry/derive/tests/ui_fail/bare_spec.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(test_query, "another_test_query_without_timing")] -fn execute(wsv: &WorldStateView) -> Result<(), ()> { +fn execute(wsv: &StateTransaction) -> Result<(), ()> { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/doubled_plus.rs b/telemetry/derive/tests/ui_fail/doubled_plus.rs index 61db9e0dda1..94e6db6c258 100644 --- a/telemetry/derive/tests/ui_fail/doubled_plus.rs +++ b/telemetry/derive/tests/ui_fail/doubled_plus.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test_query", ++"another_test_query_without_timing")] -fn execute(wsv: &WorldStateView) -> Result<(), ()> { +fn execute(wsv: &StateTransaction) -> Result<(), ()> { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/no_args.stderr b/telemetry/derive/tests/ui_fail/no_args.stderr index bf2d6e9b557..db72e9a9c2b 100644 --- a/telemetry/derive/tests/ui_fail/no_args.stderr +++ b/telemetry/derive/tests/ui_fail/no_args.stderr @@ -1,4 +1,4 @@ -error: Function must have at least one argument of type `WorldStateView`. +error: Function must have at least one argument of type `StateTransaction` or `StateSnapshot`. 
--> tests/ui_fail/no_args.rs:4:1 | 4 | fn execute() -> Result<(), ()> { diff --git a/telemetry/derive/tests/ui_fail/non_snake_case_name.rs b/telemetry/derive/tests/ui_fail/non_snake_case_name.rs index 97c83ab152f..6ae5ece5b50 100644 --- a/telemetry/derive/tests/ui_fail/non_snake_case_name.rs +++ b/telemetry/derive/tests/ui_fail/non_snake_case_name.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test query", "another_test_query_without_timing")] -fn execute(wsv: &WorldStateView) -> Result<(), ()> { +fn execute(wsv: &StateTransaction) -> Result<(), ()> { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/not_execute.rs b/telemetry/derive/tests/ui_fail/not_execute.rs index 7a63c17d08f..fe5299fc69a 100644 --- a/telemetry/derive/tests/ui_fail/not_execute.rs +++ b/telemetry/derive/tests/ui_fail/not_execute.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] -fn exequte(wsv: &WorldStateView) -> Result<(), ()> { +fn exequte(wsv: &StateTransaction) -> Result<(), ()> { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/not_execute.stderr b/telemetry/derive/tests/ui_fail/not_execute.stderr index 7f4ab728dae..19690c55759 100644 --- a/telemetry/derive/tests/ui_fail/not_execute.stderr +++ b/telemetry/derive/tests/ui_fail/not_execute.stderr @@ -1,5 +1,5 @@ error: Function should be an `impl execute` --> tests/ui_fail/not_execute.rs:4:4 | -4 | fn exequte(wsv: &WorldStateView) -> Result<(), ()> { +4 | fn exequte(wsv: &StateTransaction) -> Result<(), ()> { | ^^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/not_return_result.rs b/telemetry/derive/tests/ui_fail/not_return_result.rs index ca779d8e5ec..e92afcfb116 100644 --- a/telemetry/derive/tests/ui_fail/not_return_result.rs +++ b/telemetry/derive/tests/ui_fail/not_return_result.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] -fn execute(_wsv: &WorldStateView) -> 
iroha_core::RESULT { +fn execute(_wsv: &StateTransaction) -> iroha_core::RESULT { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/not_return_result.stderr b/telemetry/derive/tests/ui_fail/not_return_result.stderr index 6652f72014d..fe1934cde9d 100644 --- a/telemetry/derive/tests/ui_fail/not_return_result.stderr +++ b/telemetry/derive/tests/ui_fail/not_return_result.stderr @@ -1,5 +1,5 @@ error: Should return `Result`. Found RESULT - --> tests/ui_fail/not_return_result.rs:4:50 + --> tests/ui_fail/not_return_result.rs:4:52 | -4 | fn execute(_wsv: &WorldStateView) -> iroha_core::RESULT { - | ^^^^^^ +4 | fn execute(_wsv: &StateTransaction) -> iroha_core::RESULT { + | ^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/return_nothing.rs b/telemetry/derive/tests/ui_fail/return_nothing.rs index 419325ac0ba..50a35bc3207 100644 --- a/telemetry/derive/tests/ui_fail/return_nothing.rs +++ b/telemetry/derive/tests/ui_fail/return_nothing.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] -fn execute(wsv: &WorldStateView) { +fn execute(wsv: &StateTransaction) { Ok(()) } diff --git a/telemetry/derive/tests/ui_fail/trailing_plus.rs b/telemetry/derive/tests/ui_fail/trailing_plus.rs index 3034f0c7f1d..385da3213bd 100644 --- a/telemetry/derive/tests/ui_fail/trailing_plus.rs +++ b/telemetry/derive/tests/ui_fail/trailing_plus.rs @@ -1,7 +1,7 @@ use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing"+)] -fn execute(wsv: &WorldStateView) -> Result<(), ()> { +fn execute(wsv: &StateTransaction) -> Result<(), ()> { Ok(()) }