Added tracked parachains filter (paritytech#1432)
* added tracked parachains filter + changed some docs

* fix compilation
svyatonik authored and TomaszWaszczyk committed May 27, 2024
1 parent a77e2e6 commit 38faee6
Showing 6 changed files with 113 additions and 23 deletions.
1 change: 1 addition & 0 deletions bridges/bin/millau/runtime/src/lib.rs
@@ -520,6 +520,7 @@ pub type WithRialtoParachainsInstance = ();
impl pallet_bridge_parachains::Config<WithRialtoParachainsInstance> for Runtime {
type BridgesGrandpaPalletInstance = RialtoGrandpaInstance;
type ParasPalletName = RialtoParasPalletName;
type TrackedParachains = frame_support::traits::Everything;
type HeadsToKeep = HeadersToKeep;
}

2 changes: 2 additions & 0 deletions bridges/modules/grandpa/src/lib.rs
@@ -93,6 +93,8 @@ pub mod pallet {
/// The setting is there to prevent growing the on-chain state indefinitely. Note
/// the setting does not relate to block numbers - we will simply keep as many items
/// in the storage, so it doesn't guarantee any fixed timeframe for finality headers.
///
/// An incautious change of this constant may lead to orphaned entries in the runtime storage.
#[pallet::constant]
type HeadersToKeep: Get<u32>;

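To make the orphaned-entries warning above concrete, here is a minimal, hypothetical in-memory sketch (not the pallet's actual storage code) of the ring-buffer bookkeeping that a `HeadersToKeep`-style bound implies:

    use std::collections::BTreeMap;

    /// Hypothetical stand-in for the on-chain "imported hashes" ring buffer.
    struct HeaderRing {
        headers_to_keep: u32,
        /// Index of the slot that will be written next (analogous to an on-chain pointer).
        next_position: u32,
        /// Slot index -> header hash.
        slots: BTreeMap<u32, [u8; 32]>,
    }

    impl HeaderRing {
        fn insert(&mut self, hash: [u8; 32]) {
            // Once all `headers_to_keep` slots are filled, this overwrites the oldest entry.
            self.slots.insert(self.next_position, hash);
            self.next_position = (self.next_position + 1) % self.headers_to_keep;
        }
    }

If `headers_to_keep` is later reduced from, say, 8 to 4, slots 4 through 7 are never written or pruned again - those are the orphaned entries the comment warns about.
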
2 changes: 1 addition & 1 deletion bridges/modules/messages/src/mock.rs
@@ -149,7 +149,7 @@ parameter_types! {
pub const MaxUnrewardedRelayerEntriesAtInboundLane: u64 = 16;
pub const MaxUnconfirmedMessagesAtInboundLane: u64 = 32;
pub storage TokenConversionRate: FixedU128 = 1.into();
pub const TestBridgedChainId: bp_runtime::ChainId = *b"test";
pub const TestBridgedChainId: bp_runtime::ChainId = *b"test";
}

#[derive(Debug, Clone, Encode, Decode, PartialEq, Eq, TypeInfo)]
100 changes: 81 additions & 19 deletions bridges/modules/parachains/src/lib.rs
@@ -25,8 +25,9 @@

use bp_parachains::parachain_head_storage_key_at_source;
use bp_polkadot_core::parachains::{ParaHash, ParaHasher, ParaHead, ParaHeadsProof, ParaId};
use bp_runtime::StorageProofError;
use codec::{Decode, Encode};
use frame_support::RuntimeDebug;
use frame_support::{traits::Contains, RuntimeDebug};
use scale_info::TypeInfo;
use sp_runtime::traits::Header as HeaderT;
use sp_std::vec::Vec;
@@ -90,11 +91,20 @@ pub mod pallet {
#[pallet::constant]
type ParasPalletName: Get<&'static str>;

/// Set of parachains that are tracked by this pallet.
///
/// The set may be extended easily, without requiring any runtime upgrades. Removing a
/// tracked parachain requires special handling - pruning its existing heads and cleaning
/// up the related data structures.
type TrackedParachains: Contains<ParaId>;

/// Maximal number of single parachain heads to keep in the storage.
///
/// The setting is there to prevent growing the on-chain state indefinitely. Note
/// the setting does not relate to parachain block numbers - we will simply keep as many
/// items in the storage, so it doesn't guarantee any fixed timeframe for heads.
///
/// An incautious change of this constant may lead to orphaned entries in the runtime storage.
#[pallet::constant]
type HeadsToKeep: Get<u32>;
}
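
The Millau runtime above satisfies this bound with `frame_support::traits::Everything` (track every parachain), and the test mock below uses `IsInVec`. A runtime that only wants to bridge a fixed set of parachains could also supply its own filter; a minimal sketch, where the `OnlyTrustedParachains` type and the parachain IDs are hypothetical and not part of this commit:

    use bp_polkadot_core::parachains::ParaId;
    use frame_support::traits::Contains;

    /// Hypothetical filter that tracks only two hard-coded parachains.
    pub struct OnlyTrustedParachains;

    impl Contains<ParaId> for OnlyTrustedParachains {
        fn contains(para_id: &ParaId) -> bool {
            // `ParaId` is a tuple struct wrapping the `u32` parachain identifier.
            matches!(para_id.0, 1000 | 2000)
        }
    }

    // It would then be plugged into the runtime configuration as:
    // type TrackedParachains = OnlyTrustedParachains;
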
@@ -156,17 +166,40 @@ pub mod pallet {
sp_trie::StorageProof::new(parachain_heads_proof),
move |storage| {
for parachain in parachains {
// TODO: https://github.com/paritytech/parity-bridges-common/issues/1393
// if we're not tracking this parachain, we'll just ignore its head proof here
if !T::TrackedParachains::contains(&parachain) {
log::trace!(
target: "runtime::bridge-parachains",
"The head of parachain {:?} has been provided, but it is not tracked by the pallet",
parachain,
);
continue;
}

let parachain_head = match Pallet::<T, I>::read_parachain_head(&storage, parachain) {
Some(parachain_head) => parachain_head,
None => {
Ok(Some(parachain_head)) => parachain_head,
Ok(None) => {
log::trace!(
target: "runtime::bridge-parachains",
"The head of parachain {:?} is None. {}",
parachain,
if BestParaHeads::<T, I>::contains_key(&parachain) {
"Looks like it is not yet registered at the source relay chain"
} else {
"Looks like it has been deregistered from the source relay chain"
},
);
continue;
},
Err(e) => {
log::trace!(
target: "runtime::bridge-parachains",
"The head of parachain {:?} has been declared, but is missing from the proof",
"The read of head of parachain {:?} has failed: {:?}",
parachain,
e,
);
continue;
}
},
};

let _: Result<_, ()> = BestParaHeads::<T, I>::try_mutate(parachain, |stored_best_head| {
@@ -183,14 +216,6 @@
)
.map_err(|_| Error::<T, I>::InvalidStorageProof)?;

// TODO: there may be parachains we are not interested in - so we only need to accept
// intersection of `parachains-interesting-to-us` and `parachains`
// https://github.com/paritytech/parity-bridges-common/issues/1392

// TODO: if some parachain is no more interesting to us, we should start pruning its
// heads
// https://github.com/paritytech/parity-bridges-common/issues/1392

Ok(())
}
}
@@ -232,12 +257,10 @@ pub mod pallet {
fn read_parachain_head(
storage: &bp_runtime::StorageProofChecker<RelayBlockHasher>,
parachain: ParaId,
) -> Option<ParaHead> {
) -> Result<Option<ParaHead>, StorageProofError> {
let parachain_head_key =
parachain_head_storage_key_at_source(T::ParasPalletName::get(), parachain);
let parachain_head = storage.read_value(parachain_head_key.0.as_ref()).ok()??;
let parachain_head = ParaHead::decode(&mut &parachain_head[..]).ok()?;
Some(parachain_head)
storage.read_and_decode_value(parachain_head_key.0.as_ref())
}
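
`read_parachain_head` above locates the head inside the relay-chain storage proof via `parachain_head_storage_key_at_source`. As a rough illustration of what such a key typically looks like (assuming the source `Paras` pallet keeps heads in a `Heads: StorageMap<Twox64Concat, ParaId, _>` item - a sketch of the usual derivation, not necessarily the helper's exact implementation):

    use bp_polkadot_core::parachains::ParaId;
    use codec::Encode;
    use sp_core::hashing::{twox_128, twox_64};

    /// Hypothetical re-derivation of the head storage key at the source relay chain.
    fn head_storage_key(paras_pallet_name: &str, para: ParaId) -> Vec<u8> {
        // twox_128(pallet prefix) ++ twox_128(storage item name) ...
        let mut key = twox_128(paras_pallet_name.as_bytes()).to_vec();
        key.extend_from_slice(&twox_128(b"Heads"));
        // ... ++ Twox64Concat(SCALE-encoded ParaId)
        let encoded_para = para.encode();
        key.extend_from_slice(&twox_64(&encoded_para));
        key.extend_from_slice(&encoded_para);
        key
    }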

/// Try to update parachain head.
@@ -327,7 +350,9 @@ pub mod pallet {
#[cfg(test)]
mod tests {
use super::*;
use crate::mock::{run_test, test_relay_header, Origin, TestRuntime, PARAS_PALLET_NAME};
use crate::mock::{
run_test, test_relay_header, Origin, TestRuntime, PARAS_PALLET_NAME, UNTRACKED_PARACHAIN_ID,
};

use bp_test_utils::{authority_list, make_default_justification};
use frame_support::{assert_noop, assert_ok, traits::OnInitialize};
@@ -510,6 +535,43 @@ mod tests {
});
}

#[test]
fn ignores_untracked_parachain() {
let (state_root, proof) = prepare_parachain_heads_proof(vec![
(1, head_data(1, 5)),
(UNTRACKED_PARACHAIN_ID, head_data(1, 5)),
(2, head_data(1, 5)),
]);
run_test(|| {
// start with relay block #0 and try to import head#5 of parachain#1 and untracked
// parachain
initialize(state_root);
assert_ok!(Pallet::<TestRuntime>::submit_parachain_heads(
Origin::signed(1),
test_relay_header(0, state_root).hash(),
vec![ParaId(1), ParaId(UNTRACKED_PARACHAIN_ID), ParaId(2)],
proof,
));
assert_eq!(
BestParaHeads::<TestRuntime>::get(ParaId(1)),
Some(BestParaHead {
at_relay_block_number: 0,
head_hash: head_data(1, 5).hash(),
next_imported_hash_position: 1,
})
);
assert_eq!(BestParaHeads::<TestRuntime>::get(ParaId(UNTRACKED_PARACHAIN_ID)), None,);
assert_eq!(
BestParaHeads::<TestRuntime>::get(ParaId(2)),
Some(BestParaHead {
at_relay_block_number: 0,
head_hash: head_data(1, 5).hash(),
next_imported_hash_position: 1,
})
);
});
}

#[test]
fn does_nothing_when_already_imported_this_head_at_previous_relay_header() {
let (state_root, proof) = prepare_parachain_heads_proof(vec![(1, head_data(1, 0))]);
6 changes: 5 additions & 1 deletion bridges/modules/parachains/src/mock.rs
@@ -14,8 +14,9 @@
// You should have received a copy of the GNU General Public License
// along with Parity Bridges Common. If not, see <http://www.gnu.org/licenses/>.

use bp_polkadot_core::parachains::ParaId;
use bp_runtime::Chain;
use frame_support::{construct_runtime, parameter_types, weights::Weight};
use frame_support::{construct_runtime, parameter_types, traits::IsInVec, weights::Weight};
use sp_runtime::{
testing::{Header, H256},
traits::{BlakeTwo256, Header as HeaderT, IdentityLookup},
@@ -34,6 +35,7 @@ type Block = frame_system::mocking::MockBlock<TestRuntime>;
type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<TestRuntime>;

pub const PARAS_PALLET_NAME: &str = "Paras";
pub const UNTRACKED_PARACHAIN_ID: u32 = 10;

construct_runtime! {
pub enum TestRuntime where
@@ -106,11 +108,13 @@ impl pallet_bridge_grandpa::Config<pallet_bridge_grandpa::Instance2> for TestRun
parameter_types! {
pub const HeadsToKeep: u32 = 4;
pub const ParasPalletName: &'static str = PARAS_PALLET_NAME;
pub GetTenFirstParachains: Vec<ParaId> = (0..10).map(ParaId).collect();
}

impl pallet_bridge_parachains::Config for TestRuntime {
type BridgesGrandpaPalletInstance = pallet_bridge_grandpa::Instance1;
type ParasPalletName = ParasPalletName;
type TrackedParachains = IsInVec<GetTenFirstParachains>;
type HeadsToKeep = HeadsToKeep;
}

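For reference, `IsInVec<GetTenFirstParachains>` implements `Contains<ParaId>` by checking membership in the vector returned by the parameter type, so the mock tracks parachains 0 through 9 and leaves `UNTRACKED_PARACHAIN_ID` (10) untracked. A rough sketch of what the check boils down to (not the actual frame_support implementation):

    use bp_polkadot_core::parachains::ParaId;

    /// Hypothetical, simplified equivalent of the mock's `TrackedParachains` filter.
    fn mock_tracks(para: &ParaId) -> bool {
        let tracked: Vec<ParaId> = (0..10).map(ParaId).collect();
        tracked.contains(para)
    }
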
25 changes: 23 additions & 2 deletions bridges/primitives/runtime/src/storage_proof.rs
@@ -16,6 +16,7 @@

//! Logic for checking Substrate storage proofs.

use codec::Decode;
use hash_db::{HashDB, Hasher, EMPTY_PREFIX};
use sp_runtime::RuntimeDebug;
use sp_std::vec::Vec;
@@ -50,25 +51,37 @@ where
}

/// Reads a value from the available subset of storage. If the value cannot be read due to an
/// incomplete or otherwise invalid proof, this returns an error.
/// incomplete or otherwise invalid proof, this function returns an error.
pub fn read_value(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Error> {
// LayoutV1 or LayoutV0 is identical for proof that only read values.
read_trie_value::<LayoutV1<H>, _>(&self.db, &self.root, key)
.map_err(|_| Error::StorageValueUnavailable)
}

/// Reads and decodes a value from the available subset of storage. If the value cannot be read
/// due to an incomplete or otherwise invalid proof, this function returns an error. If the value
/// is read but decoding fails, this function also returns an error.
pub fn read_and_decode_value<T: Decode>(&self, key: &[u8]) -> Result<Option<T>, Error> {
self.read_value(key).and_then(|v| {
v.map(|v| T::decode(&mut &v[..]).map_err(Error::StorageValueDecodeFailed))
.transpose()
})
}
}

#[derive(Eq, RuntimeDebug, PartialEq)]
pub enum Error {
StorageRootMismatch,
StorageValueUnavailable,
StorageValueDecodeFailed(codec::Error),
}

/// Return valid storage proof and state root.
///
/// NOTE: This should only be used for **testing**.
#[cfg(feature = "std")]
pub fn craft_valid_storage_proof() -> (sp_core::H256, StorageProof) {
use codec::Encode;
use sp_state_machine::{backend::Backend, prove_read, InMemoryBackend};

let state_version = sp_runtime::StateVersion::default();
@@ -79,14 +92,15 @@ pub fn craft_valid_storage_proof() -> (sp_core::H256, StorageProof) {
(None, vec![(b"key1".to_vec(), Some(b"value1".to_vec()))]),
(None, vec![(b"key2".to_vec(), Some(b"value2".to_vec()))]),
(None, vec![(b"key3".to_vec(), Some(b"value3".to_vec()))]),
(None, vec![(b"key4".to_vec(), Some((42u64, 42u32, 42u16, 42u8).encode()))]),
// Value is too big to fit in a branch node
(None, vec![(b"key11".to_vec(), Some(vec![0u8; 32]))]),
],
state_version,
));
let root = backend.storage_root(std::iter::empty(), state_version).0;
let proof = StorageProof::new(
prove_read(backend, &[&b"key1"[..], &b"key2"[..], &b"key22"[..]])
prove_read(backend, &[&b"key1"[..], &b"key2"[..], &b"key4"[..], &b"key22"[..]])
.unwrap()
.iter_nodes(),
);
@@ -97,6 +111,7 @@
#[cfg(test)]
pub mod tests {
use super::*;
use codec::Encode;

#[test]
fn storage_proof_check() {
@@ -107,8 +122,14 @@ pub mod tests {
<StorageProofChecker<sp_core::Blake2Hasher>>::new(root, proof.clone()).unwrap();
assert_eq!(checker.read_value(b"key1"), Ok(Some(b"value1".to_vec())));
assert_eq!(checker.read_value(b"key2"), Ok(Some(b"value2".to_vec())));
assert_eq!(checker.read_value(b"key4"), Ok(Some((42u64, 42u32, 42u16, 42u8).encode())));
assert_eq!(checker.read_value(b"key11111"), Err(Error::StorageValueUnavailable));
assert_eq!(checker.read_value(b"key22"), Ok(None));
assert_eq!(checker.read_and_decode_value(b"key4"), Ok(Some((42u64, 42u32, 42u16, 42u8))),);
assert!(matches!(
checker.read_and_decode_value::<[u8; 64]>(b"key4"),
Err(Error::StorageValueDecodeFailed(_)),
));

// checking proof against invalid commitment fails
assert_eq!(
