This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Squashed 'bridges/' changes from dcaec27aa..91e66cfb9
91e66cfb9 Fix clippy issues (#1884)
0bd77f457 Reject storage proofs with unused nodes: begin (#1878)
77a3672f9 Refund extra proof bytes in message delivery transaction (#1864)

git-subtree-dir: bridges
git-subtree-split: 91e66cfb99c1a7b247e435515dd0f62b4058974e
bkontur committed Feb 16, 2023
1 parent be711f1 commit 1bb5161
Showing 19 changed files with 378 additions and 105 deletions.
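
The thread running through these changes is that storage-proof verification now tracks which trie nodes a proof reader actually consumes, and rejects proofs carrying duplicate or untouched nodes (see the new ensure_no_unused_nodes calls and StorageProofError handling in the hunks below). A minimal, self-contained sketch of the idea, using hypothetical names rather than the actual bp_runtime StorageProofChecker API:

use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum ProofError {
    DuplicateNodesInProof,
    UnusedNodesInTheProof,
}

/// Toy proof checker: it remembers which nodes were read, so leftovers can be detected.
struct ToyProofChecker {
    nodes: Vec<Vec<u8>>,  // raw trie nodes, as in `RawStorageProof = Vec<Vec<u8>>`
    used: HashSet<usize>, // indices of nodes consumed by reads
}

impl ToyProofChecker {
    fn new(nodes: Vec<Vec<u8>>) -> Result<Self, ProofError> {
        // duplicated nodes can never all be "used", so reject them up front
        let unique: HashSet<&Vec<u8>> = nodes.iter().collect();
        if unique.len() != nodes.len() {
            return Err(ProofError::DuplicateNodesInProof)
        }
        Ok(ToyProofChecker { nodes, used: HashSet::new() })
    }

    /// Reading mutates the bookkeeping, which is why the adapter methods below change from `&self` to `&mut self`.
    fn read_value(&mut self, node_index: usize) -> Option<Vec<u8>> {
        self.used.insert(node_index);
        self.nodes.get(node_index).cloned()
    }

    /// Succeeds only if every node in the proof was touched by some read.
    fn ensure_no_unused_nodes(&self) -> Result<(), ProofError> {
        if self.used.len() != self.nodes.len() {
            return Err(ProofError::UnusedNodesInTheProof)
        }
        Ok(())
    }
}

fn main() {
    let mut checker = ToyProofChecker::new(vec![b"node-a".to_vec(), b"node-b".to_vec()]).unwrap();
    let _ = checker.read_value(0); // only one of the two nodes is consumed
    assert_eq!(checker.ensure_no_unused_nodes(), Err(ProofError::UnusedNodesInTheProof));
}

The real checker works at the trie level rather than by index, but the bookkeeping, and the reason the proof verification code below calls ensure_no_unused_nodes before returning, follows the same shape.
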
1 change: 0 additions & 1 deletion Cargo.lock


1 change: 0 additions & 1 deletion bin/millau/runtime/src/rialto_messages.rs
@@ -162,7 +162,6 @@ mod tests {
RialtoGrandpaInstance,
WithRialtoMessagesInstance,
WithRialtoMessageBridge,
bp_millau::Millau,
>(AssertCompleteBridgeConstants {
this_chain_constants: AssertChainConstants {
block_length: bp_millau::BlockLength::get(),
1 change: 0 additions & 1 deletion bin/rialto/runtime/src/millau_messages.rs
@@ -161,7 +161,6 @@ mod tests {
MillauGrandpaInstance,
WithMillauMessagesInstance,
WithMillauMessageBridge,
bp_rialto::Rialto,
>(AssertCompleteBridgeConstants {
this_chain_constants: AssertChainConstants {
block_length: bp_rialto::BlockLength::get(),
8 changes: 3 additions & 5 deletions bin/runtime-common/src/integrity.rs
@@ -141,10 +141,9 @@ pub struct AssertChainConstants {
///
/// 1) block weight limits are matching;
/// 2) block size limits are matching.
pub fn assert_chain_constants<R, C>(params: AssertChainConstants)
pub fn assert_chain_constants<R>(params: AssertChainConstants)
where
R: frame_system::Config,
C: Chain,
{
// we don't check runtime version here, because in our case we'll be building relay from one
// repo and runtime will live in another repo, along with outdated relay version. To avoid
@@ -274,17 +273,16 @@ pub struct AssertCompleteBridgeConstants<'a> {

/// All bridge-related constants tests for the complete standard messages bridge (i.e. with bridge
/// GRANDPA and messages pallets deployed).
pub fn assert_complete_bridge_constants<R, GI, MI, B, This>(params: AssertCompleteBridgeConstants)
pub fn assert_complete_bridge_constants<R, GI, MI, B>(params: AssertCompleteBridgeConstants)
where
R: frame_system::Config
+ pallet_bridge_grandpa::Config<GI>
+ pallet_bridge_messages::Config<MI>,
GI: 'static,
MI: 'static,
B: MessageBridge,
This: Chain,
{
assert_chain_constants::<R, This>(params.this_chain_constants);
assert_chain_constants::<R>(params.this_chain_constants);
assert_bridge_grandpa_pallet_constants::<R, GI>();
assert_bridge_messages_pallet_constants::<R, MI>(params.messages_pallet_constants);
assert_bridge_pallet_names::<B, R, GI, MI>(params.pallet_names);
68 changes: 55 additions & 13 deletions bin/runtime-common/src/messages.rs
@@ -20,6 +20,8 @@
//! pallet is used to dispatch incoming messages. Message identified by a tuple
//! of to elements - message lane id and message nonce.

pub use bp_runtime::{UnderlyingChainOf, UnderlyingChainProvider};

use bp_header_chain::{HeaderChain, HeaderChainError};
use bp_messages::{
source_chain::{LaneMessageVerifier, TargetHeaderChain},
@@ -28,14 +30,15 @@ use bp_messages::{
},
InboundLaneData, LaneId, Message, MessageKey, MessageNonce, MessagePayload, OutboundLaneData,
};
use bp_runtime::{messages::MessageDispatchResult, Chain, ChainId, Size, StorageProofChecker};
pub use bp_runtime::{UnderlyingChainOf, UnderlyingChainProvider};
use bp_runtime::{
messages::MessageDispatchResult, Chain, ChainId, RawStorageProof, Size, StorageProofChecker,
StorageProofError,
};
use codec::{Decode, DecodeLimit, Encode};
use frame_support::{traits::Get, weights::Weight, RuntimeDebug};
use hash_db::Hasher;
use scale_info::TypeInfo;
use sp_std::{convert::TryFrom, fmt::Debug, marker::PhantomData, vec::Vec};
use sp_trie::StorageProof;
use xcm::latest::prelude::*;

/// Bidirectional message bridge.
@@ -97,9 +100,6 @@ pub type OriginOf<C> = <C as ThisChainWithMessages>::RuntimeOrigin;
/// Type of call that is used on this chain.
pub type CallOf<C> = <C as ThisChainWithMessages>::RuntimeCall;

/// Raw storage proof type (just raw trie nodes).
pub type RawStorageProof = Vec<Vec<u8>>;

/// Sub-module that is declaring types required for processing This -> Bridged chain messages.
pub mod source {
use super::*;
@@ -274,8 +274,8 @@ pub mod source {
proof;
B::BridgedHeaderChain::parse_finalized_storage_proof(
bridged_header_hash,
StorageProof::new(storage_proof),
|storage| {
storage_proof,
|mut storage| {
// Messages delivery proof is just proof of single storage key read => any error
// is fatal.
let storage_inbound_lane_data_key =
@@ -290,6 +290,11 @@
let inbound_lane_data = InboundLaneData::decode(&mut &raw_inbound_lane_data[..])
.map_err(|_| "Failed to decode inbound lane state from the proof")?;

// check that the storage proof doesn't have any untouched trie nodes
storage
.ensure_no_unused_nodes()
.map_err(|_| "Messages delivery proof has unused trie nodes")?;

Ok((lane, inbound_lane_data))
},
)
@@ -608,9 +613,9 @@ pub mod target {

B::BridgedHeaderChain::parse_finalized_storage_proof(
bridged_header_hash,
StorageProof::new(storage_proof),
storage_proof,
|storage| {
let parser =
let mut parser =
StorageProofCheckerAdapter::<_, B> { storage, _dummy: Default::default() };

// receiving proofs where end < begin is ok (if proof includes outbound lane state)
@@ -661,6 +666,12 @@
return Err(MessageProofError::Empty)
}

// check that the storage proof doesn't have any untouched trie nodes
parser
.storage
.ensure_no_unused_nodes()
.map_err(MessageProofError::StorageProof)?;

// We only support single lane messages in this generated_schema
let mut proved_messages = ProvedMessages::new();
proved_messages.insert(lane, proved_lane_messages);
@@ -686,6 +697,8 @@
FailedToDecodeMessage,
/// Failed to decode outbound lane data from the proof.
FailedToDecodeOutboundLaneState,
/// Storage proof related error.
StorageProof(StorageProofError),
}

impl From<MessageProofError> for &'static str {
@@ -700,6 +713,7 @@
"Failed to decode message from the proof",
MessageProofError::FailedToDecodeOutboundLaneState =>
"Failed to decode outbound lane data from the proof",
MessageProofError::StorageProof(_) => "Invalid storage proof",
}
}
}
@@ -710,15 +724,15 @@
}

impl<H: Hasher, B: MessageBridge> StorageProofCheckerAdapter<H, B> {
fn read_raw_outbound_lane_data(&self, lane_id: &LaneId) -> Option<Vec<u8>> {
fn read_raw_outbound_lane_data(&mut self, lane_id: &LaneId) -> Option<Vec<u8>> {
let storage_outbound_lane_data_key = bp_messages::storage_keys::outbound_lane_data_key(
B::BRIDGED_MESSAGES_PALLET_NAME,
lane_id,
);
self.storage.read_value(storage_outbound_lane_data_key.0.as_ref()).ok()?
}

fn read_raw_message(&self, message_key: &MessageKey) -> Option<Vec<u8>> {
fn read_raw_message(&mut self, message_key: &MessageKey) -> Option<Vec<u8>> {
let storage_message_key = bp_messages::storage_keys::message_key(
B::BRIDGED_MESSAGES_PALLET_NAME,
&message_key.lane_id,
@@ -928,7 +942,35 @@ mod tests {
);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
}),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageRootMismatch)),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageProof(
StorageProofError::StorageRootMismatch
))),
);
}

#[test]
fn message_proof_is_rejected_if_it_has_duplicate_trie_nodes() {
assert_eq!(
using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| {
let node = proof.storage_proof.pop().unwrap();
proof.storage_proof.push(node.clone());
proof.storage_proof.push(node);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
},),
Err(target::MessageProofError::HeaderChain(HeaderChainError::StorageProof(
StorageProofError::DuplicateNodesInProof
))),
);
}

#[test]
fn message_proof_is_rejected_if_it_has_unused_trie_nodes() {
assert_eq!(
using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| {
proof.storage_proof.push(vec![42]);
target::verify_messages_proof::<OnThisChainBridge>(proof, 10)
},),
Err(target::MessageProofError::StorageProof(StorageProofError::UnusedNodesInTheProof)),
);
}

20 changes: 8 additions & 12 deletions bin/runtime-common/src/messages_benchmarking.rs
@@ -22,7 +22,7 @@
use crate::{
messages::{
source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof,
AccountIdOf, BridgedChain, HashOf, HasherOf, MessageBridge, RawStorageProof, ThisChain,
AccountIdOf, BridgedChain, HashOf, HasherOf, MessageBridge, ThisChain,
},
messages_generation::{
encode_all_messages, encode_lane_data, grow_trie, prepare_messages_storage_proof,
Expand All @@ -31,13 +31,15 @@ use crate::{

use bp_messages::storage_keys;
use bp_polkadot_core::parachains::ParaHash;
use bp_runtime::{record_all_trie_keys, Chain, Parachain, StorageProofSize, UnderlyingChainOf};
use bp_runtime::{
record_all_trie_keys, Chain, Parachain, RawStorageProof, StorageProofSize, UnderlyingChainOf,
};
use codec::Encode;
use frame_support::weights::Weight;
use pallet_bridge_messages::benchmarking::{MessageDeliveryProofParams, MessageProofParams};
use sp_runtime::traits::{Header, Zero};
use sp_std::prelude::*;
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Prepare proof of messages for the `receive_messages_proof` call.
///
@@ -209,15 +211,9 @@
root = grow_trie(root, &mut mdb, params.size);

// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<HasherOf<BridgedChain<B>>>>::new();
record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(
&mdb,
&root,
&mut proof_recorder,
)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let storage_proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();
let storage_proof = record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(&mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");

(root, storage_proof)
}
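
A side note on the benchmarking hunk above (the same pattern repeats in messages_generation.rs and parachains_benchmarking.rs below): callers no longer create a trie Recorder, pass it into record_all_trie_keys and drain it by hand; the helper now returns the raw proof nodes directly. A toy illustration of that reshaping, with made-up helper names rather than the real sp_trie/bp_runtime signatures:

// Raw trie nodes, as in bp_runtime::RawStorageProof.
type RawStorageProof = Vec<Vec<u8>>;

// Old shape: the caller owns a recorder, passes it in, then drains it afterwards.
fn record_keys_into(recorder: &mut Vec<Vec<u8>>, trie_nodes: &[Vec<u8>]) -> Result<(), &'static str> {
    recorder.extend(trie_nodes.iter().cloned());
    Ok(())
}

// New shape: the helper collects and returns the proof itself.
fn record_keys(trie_nodes: &[Vec<u8>]) -> Result<RawStorageProof, &'static str> {
    Ok(trie_nodes.to_vec())
}

fn main() {
    let trie_nodes = vec![b"node".to_vec()];

    // old calling convention: two steps plus a manual drain
    let mut recorder = Vec::new();
    record_keys_into(&mut recorder, &trie_nodes).expect("recording should not fail");

    // new calling convention: one expression, nothing to drain
    let storage_proof = record_keys(&trie_nodes).expect("recording should not fail");
    assert_eq!(recorder, storage_proof);
}
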
23 changes: 8 additions & 15 deletions bin/runtime-common/src/messages_generation.rs
@@ -18,16 +18,16 @@

#![cfg(any(feature = "runtime-benchmarks", test))]

use crate::messages::{BridgedChain, HashOf, HasherOf, MessageBridge, RawStorageProof};
use crate::messages::{BridgedChain, HashOf, HasherOf, MessageBridge};

use bp_messages::{
storage_keys, LaneId, MessageKey, MessageNonce, MessagePayload, OutboundLaneData,
};
use bp_runtime::{record_all_trie_keys, StorageProofSize};
use bp_runtime::{record_all_trie_keys, RawStorageProof, StorageProofSize};
use codec::Encode;
use sp_core::Hasher;
use sp_std::{ops::RangeInclusive, prelude::*};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Simple and correct message data encode function.
pub(crate) fn encode_all_messages(_: MessageNonce, m: &MessagePayload) -> Option<Vec<u8>> {
@@ -97,15 +97,9 @@
root = grow_trie(root, &mut mdb, size);

// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<HasherOf<BridgedChain<B>>>>::new();
record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(
&mdb,
&root,
&mut proof_recorder,
)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let storage_proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();
let storage_proof = record_all_trie_keys::<LayoutV1<HasherOf<BridgedChain<B>>>, _>(&mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");

(root, storage_proof)
}
@@ -125,11 +119,10 @@ pub fn grow_trie<H: Hasher>(
let mut key_index = 0;
loop {
// generate storage proof to be delivered to This chain
let mut proof_recorder = Recorder::<LayoutV1<H>>::new();
record_all_trie_keys::<LayoutV1<H>, _>(mdb, &root, &mut proof_recorder)
let storage_proof = record_all_trie_keys::<LayoutV1<H>, _>(mdb, &root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let size: usize = proof_recorder.drain().into_iter().map(|n| n.data.len()).sum();
let size: usize = storage_proof.iter().map(|n| n.len()).sum();
if size > minimal_trie_size as _ {
return root
}
6 changes: 2 additions & 4 deletions bin/runtime-common/src/parachains_benchmarking.rs
@@ -29,7 +29,7 @@ use codec::Encode;
use frame_support::traits::Get;
use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber};
use sp_std::prelude::*;
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, Recorder, TrieMut};
use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut};

/// Prepare proof of messages for the `receive_messages_proof` call.
///
@@ -72,11 +72,9 @@
state_root = grow_trie(state_root, &mut mdb, size);

// generate heads storage proof
let mut proof_recorder = Recorder::<LayoutV1<RelayBlockHasher>>::new();
record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &state_root, &mut proof_recorder)
let proof = record_all_trie_keys::<LayoutV1<RelayBlockHasher>, _>(&mdb, &state_root)
.map_err(|_| "record_all_trie_keys has failed")
.expect("record_all_trie_keys should not fail in benchmarks");
let proof = proof_recorder.drain().into_iter().map(|n| n.data.to_vec()).collect();

let (relay_block_number, relay_block_hash) =
insert_header_to_grandpa_pallet::<R, R::BridgesGrandpaPalletInstance>(state_root);
9 changes: 5 additions & 4 deletions fuzz/storage-proof/src/main.rs
@@ -24,10 +24,11 @@ use honggfuzz::fuzz;
use sp_core::{Blake2Hasher, H256};
use sp_state_machine::{backend::Backend, prove_read, InMemoryBackend};
use sp_std::vec::Vec;
use sp_trie::StorageProof;
use std::collections::HashMap;

fn craft_known_storage_proof(input_vec: Vec<(Vec<u8>, Vec<u8>)>) -> (H256, StorageProof) {
fn craft_known_storage_proof(
input_vec: Vec<(Vec<u8>, Vec<u8>)>,
) -> (H256, bp_runtime::RawStorageProof) {
let storage_proof_vec =
vec![(None, input_vec.iter().map(|x| (x.0.clone(), Some(x.1.clone()))).collect())];
log::info!("Storage proof vec {:?}", storage_proof_vec);
@@ -36,7 +37,7 @@ fn craft_known_storage_proof(input_vec: Vec<(Vec<u8>, Vec<u8>)>) -> (H256, Stora
let root = backend.storage_root(std::iter::empty(), state_version).0;
let vector_element_proof =
prove_read(backend, input_vec.iter().map(|x| x.0.as_slice())).unwrap();
(root, vector_element_proof)
(root, vector_element_proof.iter_nodes().cloned().collect())
}

fn transform_into_unique(input_vec: Vec<(Vec<u8>, Vec<u8>)>) -> Vec<(Vec<u8>, Vec<u8>)> {
@@ -58,7 +59,7 @@ fn run_fuzzer() {
}
let unique_input_vec = transform_into_unique(input_vec);
let (root, craft_known_storage_proof) = craft_known_storage_proof(unique_input_vec.clone());
let checker =
let mut checker =
<bp_runtime::StorageProofChecker<Blake2Hasher>>::new(root, craft_known_storage_proof)
.expect("Valid proof passed; qed");
for key_value_pair in unique_input_vec {
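
The fuzzer change above is the same API migration seen elsewhere in this commit: the storage proof checker is now fed a bp_runtime::RawStorageProof (per the alias moved out of messages.rs, simply Vec<Vec<u8>> of raw trie nodes) instead of an sp_trie::StorageProof. A hedged sketch of the conversion, mirroring craft_known_storage_proof above; exact sp_trie signatures depend on the crate version:

use sp_trie::StorageProof;

/// Raw trie nodes, mirroring the RawStorageProof alias moved into bp_runtime.
type RawStorageProof = Vec<Vec<u8>>;

/// Flatten an sp_trie::StorageProof into raw nodes, as craft_known_storage_proof now does.
fn to_raw_storage_proof(proof: StorageProof) -> RawStorageProof {
    proof.iter_nodes().cloned().collect()
}

The checker is then built as StorageProofChecker::<Blake2Hasher>::new(root, raw_proof), as run_fuzzer does above.
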
4 changes: 2 additions & 2 deletions modules/beefy/src/utils.rs
@@ -52,7 +52,7 @@ fn verify_authority_set<T: Config<I>, I: 'static>(
///
/// We're using 'conservative' approach here, where signatures of `2/3+1` validators are
/// required..
pub(crate) fn signatures_required<T: Config<I>, I: 'static>(validators_len: usize) -> usize {
pub(crate) fn signatures_required(validators_len: usize) -> usize {
validators_len - validators_len.saturating_sub(1) / 3
}

@@ -67,7 +67,7 @@ fn verify_signatures<T: Config<I>, I: 'static>(

// Ensure that the commitment was signed by enough authorities.
let msg = commitment.commitment.encode();
let mut missing_signatures = signatures_required::<T, I>(authority_set.len());
let mut missing_signatures = signatures_required(authority_set.len());
for (idx, (authority, maybe_sig)) in
authority_set.validators().iter().zip(commitment.signatures.iter()).enumerate()
{
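
For the signatures_required change above (only the unused T/I generics are dropped; the body is unchanged), a quick worked check of the 2/3+1 threshold formula:

/// Same body as the pallet helper above: 2/3+1 of the validators must sign.
fn signatures_required(validators_len: usize) -> usize {
    validators_len - validators_len.saturating_sub(1) / 3
}

fn main() {
    assert_eq!(signatures_required(1), 1);   // 1 - 0/3 = 1
    assert_eq!(signatures_required(3), 3);   // 3 - 2/3 = 3; two thirds of 3 is 2, plus one
    assert_eq!(signatures_required(4), 3);   // 4 - 3/3 = 3; two thirds of 4 (floored) is 2, plus one
    assert_eq!(signatures_required(10), 7);  // 10 - 9/3 = 7; two thirds of 10 (floored) is 6, plus one
    assert_eq!(signatures_required(21), 15); // 21 - 20/3 = 15; two thirds of 21 is 14, plus one
}
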
2 changes: 1 addition & 1 deletion modules/grandpa/src/lib.rs
@@ -1050,7 +1050,7 @@ mod tests {
assert_noop!(
Pallet::<TestRuntime>::parse_finalized_storage_proof(
Default::default(),
sp_trie::StorageProof::new(vec![]),
vec![],
|_| (),
),
bp_header_chain::HeaderChainError::UnknownHeader,