From fca8b85cf8aaab4f387b99abe4d2252768a36698 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?=
Date: Fri, 8 Dec 2023 13:45:54 +0300
Subject: [PATCH] [fix] #0000: On-chain predictable iteration order
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Marin Veršić
---
 Cargo.lock                                  | 27 +++++++++--------
 core/Cargo.toml                             |  2 ++
 core/clippy.toml                            |  1 +
 core/src/block_sync.rs                      |  1 +
 core/src/lib.rs                             | 11 +++----
 core/src/query/store.rs                     |  6 ++--
 core/src/queue.rs                           |  4 +--
 core/src/smartcontracts/isi/triggers/set.rs | 33 +++++++++++----------
 core/src/smartcontracts/wasm.rs             |  9 +++---
 core/src/sumeragi/mod.rs                    |  5 +++-
 core/src/sumeragi/network_topology.rs       |  4 +--
 core/src/sumeragi/view_change.rs            |  4 +--
 core/src/wsv.rs                             | 11 +++----
 data_model/clippy.toml                      |  1 +
 14 files changed, 63 insertions(+), 56 deletions(-)
 create mode 100644 core/clippy.toml
 create mode 100644 data_model/clippy.toml

diff --git a/Cargo.lock b/Cargo.lock
index 9dadaa730b..c8314d5206 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1773,7 +1773,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
 dependencies = [
  "fallible-iterator",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "stable_deref_trait",
 ]
 
@@ -2595,9 +2595,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.0.2"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
 dependencies = [
  "equivalent",
  "hashbrown 0.14.1",
@@ -2806,6 +2806,7 @@ dependencies = [
  "eyre",
  "futures",
  "hex",
+ "indexmap 2.1.0",
  "iroha_config",
  "iroha_crypto",
  "iroha_data_model",
@@ -3851,7 +3852,7 @@ checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0"
 dependencies = [
  "crc32fast",
  "hashbrown 0.14.1",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "memchr",
 ]
 
@@ -4139,7 +4140,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
 dependencies = [
  "fixedbitset",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
 ]
 
 [[package]]
@@ -4910,7 +4911,7 @@ version = "0.9.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574"
 dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "itoa",
  "ryu",
  "serde",
@@ -5611,7 +5612,7 @@ version = "0.19.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
 dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "toml_datetime",
  "winnow",
 ]
@@ -6206,7 +6207,7 @@ version = "0.116.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a58e28b80dd8340cb07b8242ae654756161f6fc8d0038123d679b7b99964fa50"
 dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "semver",
 ]
 
@@ -6222,7 +6223,7 @@ dependencies = [
  "bumpalo",
  "cfg-if",
  "fxprof-processed-profile",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "libc",
  "log",
  "object",
@@ -6347,7 +6348,7 @@ dependencies = [
  "anyhow",
  "cranelift-entity",
  "gimli",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "log",
  "object",
  "serde",
@@ -6432,7 +6433,7 @@ dependencies = [
  "anyhow",
  "cc",
  "cfg-if",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "libc",
  "log",
  "mach",
@@ -6484,7 +6485,7 @@ checksum = "41786c7bbbf250c0e685b291323b50c6bb65f0505a2c0b4f0b598c740f13f185"
 dependencies = [
  "anyhow",
  "heck",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "wit-parser",
 ]
 
@@ -6739,7 +6740,7 @@ checksum = "15df6b7b28ce94b8be39d8df5cb21a08a4f3b9f33b631aedb4aa5776f785ead3"
 dependencies = [
  "anyhow",
  "id-arena",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
  "log",
  "semver",
  "serde",
diff --git a/core/Cargo.toml b/core/Cargo.toml
index 73a9f5c63b..def9118065 100644
--- a/core/Cargo.toml
+++ b/core/Cargo.toml
@@ -68,7 +68,9 @@ displaydoc = { workspace = true }
 wasmtime = { workspace = true }
 parking_lot = { workspace = true, features = ["deadlock_detection"] }
 derive_more = { workspace = true }
+
 uuid = { version = "1.4.1", features = ["v4"] }
+indexmap = "2.1.0"
 
 [dev-dependencies]
 criterion = { workspace = true }
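
For context on what the swap buys: std's HashMap and HashSet hash with a randomly seeded RandomState, so two peers holding exactly the same entries can iterate them in different orders, while indexmap's IndexMap and IndexSet always iterate in insertion order. The dependency changes above (the indexmap 2.1.0 bumps in Cargo.lock and the new indexmap dependency in core/Cargo.toml) exist to support that swap. A minimal, self-contained illustration, not taken from the Iroha codebase (the keys and values are invented):

    use std::collections::HashMap;

    use indexmap::IndexMap;

    fn main() {
        let entries = [("alice", 1_u32), ("bob", 2), ("carol", 3), ("dave", 4)];

        // HashMap: iteration order depends on the hasher's random seed, so it can
        // differ from run to run and from peer to peer.
        let hashed: HashMap<_, _> = entries.into_iter().collect();
        println!("HashMap keys:  {:?}", hashed.keys().collect::<Vec<_>>());

        // IndexMap: iteration order is the insertion order, so every peer that
        // applied the same sequence of writes observes the same order.
        let indexed: IndexMap<_, _> = entries.into_iter().collect();
        assert_eq!(
            indexed.keys().copied().collect::<Vec<_>>(),
            vec!["alice", "bob", "carol", "dave"]
        );
    }
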
"41786c7bbbf250c0e685b291323b50c6bb65f0505a2c0b4f0b598c740f13f185" dependencies = [ "anyhow", "heck", - "indexmap 2.0.2", + "indexmap 2.1.0", "wit-parser", ] @@ -6739,7 +6740,7 @@ checksum = "15df6b7b28ce94b8be39d8df5cb21a08a4f3b9f33b631aedb4aa5776f785ead3" dependencies = [ "anyhow", "id-arena", - "indexmap 2.0.2", + "indexmap 2.1.0", "log", "semver", "serde", diff --git a/core/Cargo.toml b/core/Cargo.toml index 73a9f5c63b..def9118065 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -68,7 +68,9 @@ displaydoc = { workspace = true } wasmtime = { workspace = true } parking_lot = { workspace = true, features = ["deadlock_detection"] } derive_more = { workspace = true } + uuid = { version = "1.4.1", features = ["v4"] } +indexmap = "2.1.0" [dev-dependencies] criterion = { workspace = true } diff --git a/core/clippy.toml b/core/clippy.toml new file mode 100644 index 0000000000..6fa181161e --- /dev/null +++ b/core/clippy.toml @@ -0,0 +1 @@ +disallowed-types = [] diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs index 22adcfc2ef..bf74dfcbbf 100644 --- a/core/src/block_sync.rs +++ b/core/src/block_sync.rs @@ -84,6 +84,7 @@ impl BlockSynchronizer { } /// Get a random online peer. + #[allow(clippy::disallowed_types)] pub fn random_peer(peers: &std::collections::HashSet) -> Option { use rand::{seq::IteratorRandom, SeedableRng}; diff --git a/core/src/lib.rs b/core/src/lib.rs index e0c6109e31..ecf7faf105 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -15,9 +15,10 @@ pub mod tx; pub mod wsv; use core::time::Duration; -use std::collections::{BTreeSet, HashMap, HashSet}; +use std::collections::BTreeSet; use gossiper::TransactionGossip; +use indexmap::{IndexMap, IndexSet}; use iroha_data_model::{permission::Permissions, prelude::*}; use iroha_primitives::unique_vec::UniqueVec; use parity_scale_codec::{Decode, Encode}; @@ -38,16 +39,16 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle; pub type PeersIds = UniqueVec; /// Parameters set. -pub type Parameters = HashSet; +pub type Parameters = IndexSet; /// API to work with collections of [`DomainId`] : [`Domain`] mappings. -pub type DomainsMap = HashMap; +pub type DomainsMap = IndexMap; /// API to work with a collections of [`RoleId`]: [`Role`] mappings. -pub type RolesMap = HashMap; +pub type RolesMap = IndexMap; /// API to work with a collections of [`AccountId`] [`Permissions`] mappings. -pub type PermissionTokensMap = HashMap; +pub type PermissionTokensMap = IndexMap; /// API to work with a collections of [`AccountId`] to [`RoleId`] mappings. pub type AccountRolesSet = BTreeSet; diff --git a/core/src/query/store.rs b/core/src/query/store.rs index 92684de1e0..ae1957da79 100644 --- a/core/src/query/store.rs +++ b/core/src/query/store.rs @@ -2,11 +2,11 @@ use std::{ cmp::Ordering, - collections::HashMap, num::NonZeroU64, time::{Duration, Instant}, }; +use indexmap::IndexMap; use iroha_config::live_query_store::Configuration; use iroha_data_model::{ asset::AssetValue, @@ -67,7 +67,7 @@ type LiveQuery = Batched>; /// Clients can handle their queries using [`LiveQueryStoreHandle`] #[derive(Debug)] pub struct LiveQueryStore { - queries: HashMap, + queries: IndexMap, query_idle_time: Duration, } @@ -75,7 +75,7 @@ impl LiveQueryStore { /// Construct [`LiveQueryStore`] from configuration. 
diff --git a/core/src/lib.rs b/core/src/lib.rs
index e0c6109e31..ecf7faf105 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -15,9 +15,10 @@ pub mod tx;
 pub mod wsv;
 
 use core::time::Duration;
-use std::collections::{BTreeSet, HashMap, HashSet};
+use std::collections::BTreeSet;
 
 use gossiper::TransactionGossip;
+use indexmap::{IndexMap, IndexSet};
 use iroha_data_model::{permission::Permissions, prelude::*};
 use iroha_primitives::unique_vec::UniqueVec;
 use parity_scale_codec::{Decode, Encode};
@@ -38,16 +39,16 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle;
 pub type PeersIds = UniqueVec<PeerId>;
 
 /// Parameters set.
-pub type Parameters = HashSet<Parameter>;
+pub type Parameters = IndexSet<Parameter>;
 
 /// API to work with collections of [`DomainId`] : [`Domain`] mappings.
-pub type DomainsMap = HashMap<DomainId, Domain>;
+pub type DomainsMap = IndexMap<DomainId, Domain>;
 
 /// API to work with a collections of [`RoleId`]: [`Role`] mappings.
-pub type RolesMap = HashMap<RoleId, Role>;
+pub type RolesMap = IndexMap<RoleId, Role>;
 
 /// API to work with a collections of [`AccountId`] [`Permissions`] mappings.
-pub type PermissionTokensMap = HashMap<AccountId, Permissions>;
+pub type PermissionTokensMap = IndexMap<AccountId, Permissions>;
 
 /// API to work with a collections of [`AccountId`] to [`RoleId`] mappings.
 pub type AccountRolesSet = BTreeSet;
diff --git a/core/src/query/store.rs b/core/src/query/store.rs
index 92684de1e0..ae1957da79 100644
--- a/core/src/query/store.rs
+++ b/core/src/query/store.rs
@@ -2,11 +2,11 @@
 
 use std::{
     cmp::Ordering,
-    collections::HashMap,
     num::NonZeroU64,
     time::{Duration, Instant},
 };
 
+use indexmap::IndexMap;
 use iroha_config::live_query_store::Configuration;
 use iroha_data_model::{
     asset::AssetValue,
@@ -67,7 +67,7 @@ type LiveQuery = Batched>;
 /// Clients can handle their queries using [`LiveQueryStoreHandle`]
 #[derive(Debug)]
 pub struct LiveQueryStore {
-    queries: HashMap,
+    queries: IndexMap,
     query_idle_time: Duration,
 }
 
@@ -75,7 +75,7 @@ impl LiveQueryStore {
     /// Construct [`LiveQueryStore`] from configuration.
     pub fn from_configuration(cfg: Configuration) -> Self {
         Self {
-            queries: HashMap::default(),
+            queries: IndexMap::new(),
             query_idle_time: Duration::from_millis(cfg.query_idle_time_ms.into()),
         }
     }
diff --git a/core/src/queue.rs b/core/src/queue.rs
index bc3d860cd1..e19e46f2ba 100644
--- a/core/src/queue.rs
+++ b/core/src/queue.rs
@@ -1,10 +1,10 @@
 //! Module with queue actor
 use core::time::Duration;
-use std::collections::HashSet;
 
 use crossbeam_queue::ArrayQueue;
 use dashmap::{mapref::entry::Entry, DashMap};
 use eyre::{Report, Result};
+use indexmap::IndexSet;
 use iroha_config::queue::Configuration;
 use iroha_crypto::HashOf;
 use iroha_data_model::{account::AccountId, transaction::prelude::*};
@@ -326,7 +326,7 @@ impl Queue {
             self.pop_from_queue(&mut seen_queue, wsv, &mut expired_transactions_queue)
         });
 
-        let transactions_hashes: HashSet> =
+        let transactions_hashes: IndexSet> =
             transactions.iter().map(|tx| tx.payload().hash()).collect();
         let txs = txs_from_queue
            .filter(|tx| !transactions_hashes.contains(&tx.payload().hash()))
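
The lib.rs, query/store.rs and queue.rs hunks are deliberately mechanical: IndexMap and IndexSet mirror the parts of the HashMap/HashSet API used here (new, collect, contains and the like), so only type aliases, imports and constructors change while lookups stay O(1). The queue change is representative: hashes of transactions already chosen for the block are collected into a set, and candidates popped from the queue are filtered against it. A simplified stand-in for that pattern, using plain u64 values instead of transaction hashes (this is not the actual Queue code):

    use indexmap::IndexSet;

    fn main() {
        // Hashes of transactions that are already part of the block being built.
        let in_block: IndexSet<u64> = [10, 20, 30].into_iter().collect();

        // Candidate transactions popped from the queue, identified by the same kind of hash.
        let queued = vec![20, 40, 10, 50];

        // Keep only candidates that are not already in the block.
        let fresh: Vec<u64> = queued
            .into_iter()
            .filter(|hash| !in_block.contains(hash))
            .collect();
        assert_eq!(fresh, vec![40, 50]);

        // Unlike a HashSet, the set itself also iterates in insertion order, which is
        // what makes state that stores such collections reproducible across peers.
        assert_eq!(in_block.iter().copied().collect::<Vec<_>>(), vec![10, 20, 30]);
    }
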
diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs
index 624fc2a6ac..b32e49190e 100644
--- a/core/src/smartcontracts/isi/triggers/set.rs
+++ b/core/src/smartcontracts/isi/triggers/set.rs
@@ -9,8 +9,9 @@
 //! trigger hooks.
 
 use core::cmp::min;
-use std::{collections::HashMap, fmt};
+use std::fmt;
 
+use indexmap::IndexMap;
 use iroha_crypto::HashOf;
 use iroha_data_model::{
     events::Filter as EventFilter,
@@ -138,17 +139,17 @@ impl + Clone> LoadedActionTrait for Loaded
 #[derive(Debug, Default)]
 pub struct Set {
     /// Triggers using [`DataEventFilter`]
-    data_triggers: HashMap>,
+    data_triggers: IndexMap>,
     /// Triggers using [`PipelineEventFilter`]
-    pipeline_triggers: HashMap>,
+    pipeline_triggers: IndexMap>,
     /// Triggers using [`TimeEventFilter`]
-    time_triggers: HashMap>,
+    time_triggers: IndexMap>,
     /// Triggers using [`ExecuteTriggerEventFilter`]
-    by_call_triggers: HashMap>,
+    by_call_triggers: IndexMap>,
     /// Trigger ids with type of events they process
-    ids: HashMap,
+    ids: IndexMap,
     /// Original [`WasmSmartContract`]s by [`TriggerId`] for querying purposes.
-    original_contracts: HashMap, WasmSmartContract>,
+    original_contracts: IndexMap, WasmSmartContract>,
     /// List of actions that should be triggered by events provided by `handle_*` methods.
     /// Vector is used to save the exact triggers order.
     matched_ids: Vec<(Event, TriggerId)>,
 }
@@ -157,14 +158,14 @@ pub struct Set {
 /// Helper struct for serializing triggers.
 struct TriggersWithContext<'s, F> {
     /// Triggers being serialized
-    triggers: &'s HashMap>,
+    triggers: &'s IndexMap>,
     /// Containing Set, used for looking up origignal [`WasmSmartContract`]s
     /// during serialization.
     set: &'s Set,
 }
 
 impl<'s, F> TriggersWithContext<'s, F> {
-    fn new(triggers: &'s HashMap>, set: &'s Set) -> Self {
+    fn new(triggers: &'s IndexMap>, set: &'s Set) -> Self {
         Self { triggers, set }
     }
 }
@@ -236,7 +237,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> {
                 while let Some(key) = map.next_key::()? {
                     match key.as_str() {
                         "data_triggers" => {
-                            let triggers: HashMap> =
+                            let triggers: IndexMap> =
                                 map.next_value()?;
                             for (id, action) in triggers {
                                 set.add_data_trigger(self.loader.engine, Trigger::new(id, action))
@@ -244,7 +245,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> {
                             }
                         }
                         "pipeline_triggers" => {
-                            let triggers: HashMap> =
+                            let triggers: IndexMap> =
                                 map.next_value()?;
                             for (id, action) in triggers {
                                 set.add_pipeline_trigger(
@@ -255,7 +256,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> {
                             }
                         }
                         "time_triggers" => {
-                            let triggers: HashMap> =
+                            let triggers: IndexMap> =
                                 map.next_value()?;
                             for (id, action) in triggers {
                                 set.add_time_trigger(self.loader.engine, Trigger::new(id, action))
@@ -263,7 +264,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> {
                             }
                         }
                         "by_call_triggers" => {
-                            let triggers: HashMap> =
+                            let triggers: IndexMap> =
                                 map.next_value()?;
                             for (id, action) in triggers {
                                 set.add_by_call_trigger(
@@ -387,7 +388,7 @@ impl Set {
         engine: &wasmtime::Engine,
         trigger: Trigger,
         event_type: TriggeringEventType,
-        map: impl FnOnce(&mut Self) -> &mut HashMap>,
+        map: impl FnOnce(&mut Self) -> &mut IndexMap>,
     ) -> Result {
         if self.contains(trigger.id()) {
             return Ok(false);
@@ -816,8 +817,8 @@ impl Set {
 
     /// Remove actions with zero execution count from `triggers`
     fn remove_zeros(
-        ids: &mut HashMap,
-        triggers: &mut HashMap>,
+        ids: &mut IndexMap,
+        triggers: &mut IndexMap>,
     ) {
         let to_remove: Vec = triggers
             .iter()
diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs
index 0d0116dba2..a7c0b8b757 100644
--- a/core/src/smartcontracts/wasm.rs
+++ b/core/src/smartcontracts/wasm.rs
@@ -328,9 +328,8 @@ impl LimitsExecutor {
 pub mod state {
     //! All supported states for [`Runtime`](super::Runtime)
 
-    use std::collections::HashSet;
-
     use derive_more::Constructor;
+    use indexmap::IndexSet;
 
     use super::*;
 
@@ -360,7 +359,7 @@ pub mod state {
         pub(super) store_limits: StoreLimits,
         /// Span inside of which all logs are recorded for this smart contract
         pub(super) log_span: Span,
-        pub(super) executed_queries: HashSet,
+        pub(super) executed_queries: IndexSet,
         /// Borrowed [`WorldStateView`] kind
         pub(super) wsv: W,
         /// Concrete state for specific executable
@@ -380,14 +379,14 @@ pub mod state {
                 authority,
                 store_limits: store_limits_from_config(&config),
                 log_span,
-                executed_queries: HashSet::new(),
+                executed_queries: IndexSet::new(),
                 wsv,
                 specific_state,
             }
         }
 
         /// Take executed queries leaving an empty set
-        pub fn take_executed_queries(&mut self) -> HashSet {
+        pub fn take_executed_queries(&mut self) -> IndexSet {
            std::mem::take(&mut self.executed_queries)
         }
     }
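
In set.rs both the live trigger storage and its custom serde path switch containers, which is where predictable order pays off most directly: IndexMap serializes entries in insertion order and re-inserts them in that same order on deserialization, so snapshotting and restoring the trigger set cannot silently reshuffle it the way a HashMap round trip could; wasm.rs gives the executed-queries scratch set the same treatment. A minimal demonstration of the round-trip property, assuming indexmap is built with its serde feature and using serde_json purely as an example codec (the patch itself deserializes through the WasmSeed/DeserializeSeed machinery shown above, not serde_json):

    use indexmap::IndexMap;

    fn main() {
        let mut triggers: IndexMap<String, u32> = IndexMap::new();
        // Hypothetical trigger ids; the real keys are TriggerId values.
        triggers.insert("mint_rewards".to_owned(), 1);
        triggers.insert("expire_offers".to_owned(), 2);
        triggers.insert("rotate_keys".to_owned(), 3);

        // Entries are written out in insertion order and re-inserted in the same
        // order when read back, so a snapshot round trip preserves iteration order.
        let json = serde_json::to_string(&triggers).expect("serialization should succeed");
        let restored: IndexMap<String, u32> =
            serde_json::from_str(&json).expect("deserialization should succeed");
        assert!(triggers.iter().eq(restored.iter()));
    }
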
diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs
index b6f3c7391f..7066d5e9e0 100644
--- a/core/src/sumeragi/mod.rs
+++ b/core/src/sumeragi/mod.rs
@@ -110,7 +110,10 @@ impl SumeragiHandle {
     pub fn update_metrics(&self) -> Result<()> {
         let online_peers_count: u64 = self
             .network
-            .online_peers(std::collections::HashSet::len)
+            .online_peers(
+                #[allow(clippy::disallowed_types)]
+                std::collections::HashSet::len,
+            )
             .try_into()
             .expect("casting usize to u64");
 
diff --git a/core/src/sumeragi/network_topology.rs b/core/src/sumeragi/network_topology.rs
index 05e92157d3..4ba77806e4 100644
--- a/core/src/sumeragi/network_topology.rs
+++ b/core/src/sumeragi/network_topology.rs
@@ -1,7 +1,7 @@
 //! Structures formalising the peer topology (e.g. which peers have which predefined roles).
-use std::collections::HashSet;
 
 use derive_more::Display;
+use indexmap::IndexSet;
 use iroha_crypto::{PublicKey, SignatureOf};
 use iroha_data_model::{block::SignedBlock, prelude::PeerId};
 use iroha_logger::trace;
@@ -88,7 +88,7 @@ impl Topology {
         roles: &[Role],
         signatures: I,
     ) -> Vec> {
-        let mut public_keys: HashSet<&PublicKey> = HashSet::with_capacity(self.ordered_peers.len());
+        let mut public_keys = IndexSet::with_capacity(self.ordered_peers.len());
         for role in roles {
             match (role, self.is_non_empty(), self.is_consensus_required()) {
                 (Role::Leader, Some(topology), _) => {
diff --git a/core/src/sumeragi/view_change.rs b/core/src/sumeragi/view_change.rs
index 11f24b90cf..0b0ed73032 100644
--- a/core/src/sumeragi/view_change.rs
+++ b/core/src/sumeragi/view_change.rs
@@ -1,9 +1,9 @@
 //! Structures related to proofs and reasons of view changes.
 //! Where view change is a process of changing topology due to some faulty network behavior.
-use std::collections::HashSet;
 
 use derive_more::{Deref, DerefMut};
 use eyre::Result;
+use indexmap::IndexSet;
 use iroha_crypto::{HashOf, KeyPair, PublicKey, SignatureOf, SignaturesOf};
 use iroha_data_model::{block::SignedBlock, prelude::PeerId};
 use parity_scale_codec::{Decode, Encode};
@@ -76,7 +76,7 @@ impl SignedProof {
 
     /// Verify if the proof is valid, given the peers in `topology`.
     fn verify(&self, peers: &[PeerId], max_faults: usize) -> bool {
-        let peer_public_keys: HashSet<&PublicKey> =
+        let peer_public_keys: IndexSet<&PublicKey> =
             peers.iter().map(|peer_id| &peer_id.public_key).collect();
 
         let valid_count = self
diff --git a/core/src/wsv.rs b/core/src/wsv.rs
index 13a899845e..29274a3323 100644
--- a/core/src/wsv.rs
+++ b/core/src/wsv.rs
@@ -1,15 +1,12 @@
 //! This module provides the [`WorldStateView`] — an in-memory representation of the current blockchain
 //! state.
 use std::{
-    borrow::Borrow,
-    collections::{BTreeSet, HashMap},
-    fmt::Debug,
-    marker::PhantomData,
-    sync::Arc,
+    borrow::Borrow, collections::BTreeSet, fmt::Debug, marker::PhantomData, sync::Arc,
     time::Duration,
 };
 
 use eyre::Result;
+use indexmap::IndexMap;
 use iroha_config::{
     base::proxy::Builder,
     wsv::{Configuration, ConfigurationProxy},
@@ -277,7 +274,7 @@ pub struct WorldStateView {
     /// Blockchain.
     pub block_hashes: Vec>,
     /// Hashes of transactions mapped onto block height where they stored
-    pub transactions: HashMap, u64>,
+    pub transactions: IndexMap, u64>,
     /// Buffer containing events generated during `WorldStateView::apply`. Renewed on every block commit.
     #[serde(skip)]
     pub events_buffer: Vec,
@@ -942,7 +939,7 @@ impl WorldStateView {
         Self {
             world,
             config,
-            transactions: HashMap::new(),
+            transactions: IndexMap::new(),
             block_hashes: Vec::new(),
             events_buffer: Vec::new(),
             new_tx_amounts: Arc::new(Mutex::new(Vec::new())),
diff --git a/data_model/clippy.toml b/data_model/clippy.toml
new file mode 100644
index 0000000000..ad9bd114be
--- /dev/null
+++ b/data_model/clippy.toml
@@ -0,0 +1 @@
+disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"]
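
The remaining hunks finish the sweep: the metrics call in sumeragi/mod.rs keeps its std HashSet behind an explicit allow (it only takes the set's length, so ordering is irrelevant there), the signature-verification scratch sets in network_topology.rs and view_change.rs and the transactions index in wsv.rs move to indexmap types, and data_model/clippy.toml bans the std hash collections in that crate so nondeterministic containers are not reintroduced unnoticed. The verification logic itself is unchanged in shape; it is membership counting, roughly like the following stand-in with simplified types (this is not the real SignedProof::verify):

    use indexmap::IndexSet;

    /// Count how many signatures come from known peers, in the spirit of
    /// view_change.rs: collect the peers' keys into a set, then test membership.
    fn valid_signature_count(peer_keys: &[&str], signature_keys: &[&str]) -> usize {
        let peer_public_keys: IndexSet<&str> = peer_keys.iter().copied().collect();
        signature_keys
            .iter()
            .copied()
            .filter(|key| peer_public_keys.contains(key))
            .count()
    }

    fn main() {
        let peers = ["peer_a_pk", "peer_b_pk", "peer_c_pk"];
        let signatures = ["peer_b_pk", "peer_x_pk", "peer_a_pk"];
        assert_eq!(valid_signature_count(&peers, &signatures), 2);
    }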