Skip to content

Commit

Permalink
Parachain loop metrics (paritytech#1484)
Browse files Browse the repository at this point in the history
* parachain loop metrics

* some fixes

* mini refactoring

* add tests
  • Loading branch information
svyatonik authored and serban300 committed Apr 8, 2024
1 parent 82788c4 commit 788ea16
Show file tree
Hide file tree
Showing 7 changed files with 228 additions and 22 deletions.
24 changes: 24 additions & 0 deletions bridges/modules/parachains/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,6 +405,7 @@ mod tests {
use frame_support::{
assert_noop, assert_ok,
dispatch::DispatchResultWithPostInfo,
storage::generator::{StorageDoubleMap, StorageMap},
traits::{Get, OnInitialize},
weights::Weight,
};
Expand Down Expand Up @@ -810,4 +811,27 @@ mod tests {
);
});
}

#[test]
fn storage_keys_computed_properly() {
	// The pallet storage generators and the standalone key-computation helpers in
	// `bp_parachains` must produce identical final storage keys — relayers use the
	// helpers to read values written by the pallet, so any divergence would make
	// them look up the wrong trie entries.
	let para_id = ParaId(42);
	let head_hash = ParaHash::from([21u8; 32]);

	// Key of the best known head hash for a parachain (`BestParaHeads` map).
	let pallet_best_head_key = BestParaHeads::<TestRuntime>::storage_map_final_key(para_id);
	let helper_best_head_key =
		bp_parachains::best_parachain_head_hash_storage_key_at_target("Parachains", para_id);
	assert_eq!(pallet_best_head_key.to_vec(), helper_best_head_key.0);

	// Key of an imported head, addressed by (parachain, head hash)
	// (`ImportedParaHeads` double map).
	let pallet_imported_head_key =
		ImportedParaHeads::<TestRuntime>::storage_double_map_final_key(para_id, head_hash);
	let helper_imported_head_key = bp_parachains::imported_parachain_head_storage_key_at_target(
		"Parachains",
		para_id,
		head_hash,
	);
	assert_eq!(pallet_imported_head_key.to_vec(), helper_imported_head_key.0);
}
}
18 changes: 17 additions & 1 deletion bridges/primitives/parachains/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ pub fn parachain_head_storage_key_at_source(
/// Returns runtime storage key of best known parachain head at the target chain.
///
/// The head is stored by the `pallet-bridge-parachains` pallet in the `BestParaHeads` map.
pub fn parachain_head_storage_key_at_target(
pub fn best_parachain_head_hash_storage_key_at_target(
bridge_parachains_pallet_name: &str,
para_id: ParaId,
) -> StorageKey {
Expand All @@ -66,3 +66,19 @@ pub fn parachain_head_storage_key_at_target(
&para_id.encode(),
)
}

/// Returns runtime storage key of the parachain head with given hash at the target chain.
///
/// The head is stored by the `pallet-bridge-parachains` pallet in the `ImportedParaHeads` map.
pub fn imported_parachain_head_storage_key_at_target(
	bridge_parachains_pallet_name: &str,
	para_id: ParaId,
	head_hash: ParaHash,
) -> StorageKey {
	// SCALE-encode both parts of the double-map key; hashers below must mirror the
	// hasher configuration of the pallet's `ImportedParaHeads` storage item.
	let encoded_para_id = para_id.encode();
	let encoded_head_hash = head_hash.encode();
	bp_runtime::storage_double_map_final_key::<Blake2_128Concat, Blake2_128Concat>(
		bridge_parachains_pallet_name,
		"ImportedParaHeads",
		&encoded_para_id,
		&encoded_head_hash,
	)
}
32 changes: 32 additions & 0 deletions bridges/primitives/runtime/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,38 @@ pub fn storage_map_final_key<H: StorageHasher>(
StorageKey(final_key)
}

/// This is a copy of the
/// `frame_support::storage::generator::StorageDoubleMap::storage_double_map_final_key` for maps
/// based on selected hashers.
///
/// We're using it because to call `storage_double_map_final_key` directly, we need access to the
/// runtime and pallet instance, which (sometimes) is impossible.
pub fn storage_double_map_final_key<H1: StorageHasher, H2: StorageHasher>(
	pallet_prefix: &str,
	map_name: &str,
	key1: &[u8],
	key2: &[u8],
) -> StorageKey {
	// Final key layout (matching FRAME's storage generator):
	// twox128(pallet_prefix) ++ twox128(map_name) ++ H1(key1) ++ H2(key2)
	let hashed_pallet_prefix = frame_support::Twox128::hash(pallet_prefix.as_bytes());
	let hashed_map_name = frame_support::Twox128::hash(map_name.as_bytes());
	let hashed_key1 = H1::hash(key1);
	let hashed_key2 = H2::hash(key2);

	let final_key: Vec<u8> = hashed_pallet_prefix
		.iter()
		.chain(hashed_map_name.iter())
		.copied()
		.chain(hashed_key1.as_ref().iter().copied())
		.chain(hashed_key2.as_ref().iter().copied())
		.collect();

	StorageKey(final_key)
}

/// This is how a storage key of storage parameter (`parameter_types! { storage Param: bool = false;
/// }`) is computed.
///
Expand Down
22 changes: 19 additions & 3 deletions bridges/relays/lib-substrate-relay/src/parachains/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,10 @@ use async_trait::async_trait;
use bp_parachains::parachain_head_storage_key_at_source;
use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId};
use codec::Decode;
use parachains_relay::parachains_loop::{ParaHashAtSource, SourceClient};
use parachains_relay::{
parachains_loop::{ParaHashAtSource, SourceClient},
parachains_loop_metrics::ParachainsLoopMetrics,
};
use relay_substrate_client::{
Chain, Client, Error as SubstrateError, HeaderIdOf, HeaderOf, RelayChain,
};
Expand Down Expand Up @@ -100,6 +103,7 @@ where
async fn parachain_head(
&self,
at_block: HeaderIdOf<P::SourceRelayChain>,
metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<ParaHashAtSource, Self::Error> {
// we don't need to support many parachains now
Expand All @@ -111,9 +115,11 @@ where
)))
}

Ok(match self.on_chain_parachain_header(at_block, para_id).await? {
let mut para_header_number_at_source = None;
let para_hash_at_source = match self.on_chain_parachain_header(at_block, para_id).await? {
Some(parachain_header) => {
let mut parachain_head = ParaHashAtSource::Some(parachain_header.hash());
para_header_number_at_source = Some(*parachain_header.number());
// never return head that is larger than requested. This way we'll never sync
// headers past `maximal_header_id`
if let Some(ref maximal_header_id) = self.maximal_header_id {
Expand All @@ -125,19 +131,29 @@ where
// we don't want this header yet => let's report previously requested
// header
parachain_head = ParaHashAtSource::Some(maximal_header_id.1);
para_header_number_at_source = Some(maximal_header_id.0);
},
Some(_) => (),
None => {
// on-demand relay has not yet asked us to sync anything let's do that
parachain_head = ParaHashAtSource::Unavailable;
para_header_number_at_source = None;
},
}
}

parachain_head
},
None => ParaHashAtSource::None,
})
};

if let (Some(metrics), Some(para_header_number_at_source)) =
(metrics, para_header_number_at_source)
{
metrics.update_best_parachain_block_at_source(para_id, para_header_number_at_source);
}

Ok(para_hash_at_source)
}

async fn prove_parachain_heads(
Expand Down
51 changes: 44 additions & 7 deletions bridges/relays/lib-substrate-relay/src/parachains/target.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,19 @@ use crate::{
};

use async_trait::async_trait;
use bp_parachains::{parachain_head_storage_key_at_target, BestParaHeadHash};
use bp_polkadot_core::parachains::{ParaHeadsProof, ParaId};
use bp_parachains::{
best_parachain_head_hash_storage_key_at_target, imported_parachain_head_storage_key_at_target,
BestParaHeadHash,
};
use bp_polkadot_core::parachains::{ParaHead, ParaHeadsProof, ParaId};
use codec::{Decode, Encode};
use parachains_relay::parachains_loop::TargetClient;
use parachains_relay::{
parachains_loop::TargetClient, parachains_loop_metrics::ParachainsLoopMetrics,
};
use relay_substrate_client::{
AccountIdOf, AccountKeyPairOf, BlockNumberOf, Chain, Client, Error as SubstrateError, HashOf,
HeaderIdOf, RelayChain, SignParam, TransactionEra, TransactionSignScheme, UnsignedTransaction,
HeaderIdOf, HeaderOf, RelayChain, SignParam, TransactionEra, TransactionSignScheme,
UnsignedTransaction,
};
use relay_utils::{relay_loop::Client as RelayClient, HeaderId};
use sp_core::{Bytes, Pair};
Expand Down Expand Up @@ -115,15 +121,46 @@ where
async fn parachain_head(
&self,
at_block: HeaderIdOf<P::TargetChain>,
metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<Option<BestParaHeadHash>, Self::Error> {
let storage_key = parachain_head_storage_key_at_target(
let best_para_head_hash_key = best_parachain_head_hash_storage_key_at_target(
P::SourceRelayChain::PARACHAINS_FINALITY_PALLET_NAME,
para_id,
);
let para_head = self.client.storage_value(storage_key, Some(at_block.1)).await?;
let best_para_head_hash: Option<BestParaHeadHash> =
self.client.storage_value(best_para_head_hash_key, Some(at_block.1)).await?;
if let (Some(metrics), &Some(ref best_para_head_hash)) = (metrics, &best_para_head_hash) {
let imported_para_head_key = imported_parachain_head_storage_key_at_target(
P::SourceRelayChain::PARACHAINS_FINALITY_PALLET_NAME,
para_id,
best_para_head_hash.head_hash,
);
let imported_para_header = self
.client
.storage_value::<ParaHead>(imported_para_head_key, Some(at_block.1))
.await?
.and_then(|h| match HeaderOf::<P::SourceParachain>::decode(&mut &h.0[..]) {
Ok(header) => Some(header),
Err(e) => {
log::error!(
target: "bridge-metrics",
"Failed to decode {} parachain header at {}: {:?}. Metric will have obsolete value",
P::SourceParachain::NAME,
P::TargetChain::NAME,
e,
);

None
},
});
if let Some(imported_para_header) = imported_para_header {
metrics
.update_best_parachain_block_at_target(para_id, *imported_para_header.number());
}
}

Ok(para_head)
Ok(best_para_head_hash)
}

async fn submit_parachain_heads_proof(
Expand Down
29 changes: 23 additions & 6 deletions bridges/relays/parachains/src/parachains_loop.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,13 @@ pub trait SourceClient<P: ParachainsPipeline>: RelayClient {
async fn ensure_synced(&self) -> Result<bool, Self::Error>;

/// Get parachain head hash at given block.
///
/// The implementation may call `ParachainsLoopMetrics::update_best_parachain_block_at_source`
/// on provided `metrics` object to update corresponding metric value.
async fn parachain_head(
&self,
at_block: HeaderIdOf<P::SourceChain>,
metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<ParaHashAtSource, Self::Error>;

Expand All @@ -103,9 +107,13 @@ pub trait TargetClient<P: ParachainsPipeline>: RelayClient {
) -> Result<HeaderIdOf<P::SourceChain>, Self::Error>;

/// Get parachain head hash at given block.
///
/// The implementation may call `ParachainsLoopMetrics::update_best_parachain_block_at_target`
/// on provided `metrics` object to update corresponding metric value.
async fn parachain_head(
&self,
at_block: HeaderIdOf<P::TargetChain>,
metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<Option<BestParaHeadHash>, Self::Error>;

Expand Down Expand Up @@ -158,7 +166,7 @@ async fn run_until_connection_lost<P: ParachainsPipeline>(
source_client: impl SourceClient<P>,
target_client: impl TargetClient<P>,
sync_params: ParachainSyncParams,
_metrics: Option<ParachainsLoopMetrics>,
metrics: Option<ParachainsLoopMetrics>,
exit_signal: impl Future<Output = ()> + Send,
) -> Result<(), FailedClient>
where
Expand Down Expand Up @@ -213,9 +221,13 @@ where
log::warn!(target: "bridge", "Failed to read best {} block: {:?}", P::SourceChain::NAME, e);
FailedClient::Target
})?;
let heads_at_target =
read_heads_at_target(&target_client, &best_target_block, &sync_params.parachains)
.await?;
let heads_at_target = read_heads_at_target(
&target_client,
metrics.as_ref(),
&best_target_block,
&sync_params.parachains,
)
.await?;
tx_tracker = tx_tracker.take().and_then(|tx_tracker| tx_tracker.update(&heads_at_target));
if tx_tracker.is_some() {
continue
Expand All @@ -238,6 +250,7 @@ where
})?;
let heads_at_source = read_heads_at_source(
&source_client,
metrics.as_ref(),
&best_finalized_relay_block,
&sync_params.parachains,
)
Expand Down Expand Up @@ -398,12 +411,13 @@ fn is_update_required(sync_params: &ParachainSyncParams, updated_ids: &[ParaId])
/// Guarantees that the returning map will have an entry for every parachain from `parachains`.
async fn read_heads_at_source<P: ParachainsPipeline>(
source_client: &impl SourceClient<P>,
metrics: Option<&ParachainsLoopMetrics>,
at_relay_block: &HeaderIdOf<P::SourceChain>,
parachains: &[ParaId],
) -> Result<BTreeMap<ParaId, ParaHashAtSource>, FailedClient> {
let mut para_head_hashes = BTreeMap::new();
for para in parachains {
let para_head = source_client.parachain_head(*at_relay_block, *para).await;
let para_head = source_client.parachain_head(*at_relay_block, metrics, *para).await;
match para_head {
Ok(para_head) => {
para_head_hashes.insert(*para, para_head);
Expand All @@ -428,12 +442,13 @@ async fn read_heads_at_source<P: ParachainsPipeline>(
/// Guarantees that the returning map will have an entry for every parachain from `parachains`.
async fn read_heads_at_target<P: ParachainsPipeline>(
target_client: &impl TargetClient<P>,
metrics: Option<&ParachainsLoopMetrics>,
at_block: &HeaderIdOf<P::TargetChain>,
parachains: &[ParaId],
) -> Result<BTreeMap<ParaId, Option<BestParaHeadHash>>, FailedClient> {
let mut para_best_head_hashes = BTreeMap::new();
for para in parachains {
let para_best_head = target_client.parachain_head(*at_block, *para).await;
let para_best_head = target_client.parachain_head(*at_block, metrics, *para).await;
match para_best_head {
Ok(para_best_head) => {
para_best_head_hashes.insert(*para, para_best_head);
Expand Down Expand Up @@ -638,6 +653,7 @@ mod tests {
async fn parachain_head(
&self,
_at_block: HeaderIdOf<TestChain>,
_metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<ParaHashAtSource, TestError> {
match self.data.lock().await.source_heads.get(&para_id.0).cloned() {
Expand Down Expand Up @@ -684,6 +700,7 @@ mod tests {
async fn parachain_head(
&self,
_at_block: HeaderIdOf<TestChain>,
_metrics: Option<&ParachainsLoopMetrics>,
para_id: ParaId,
) -> Result<Option<BestParaHeadHash>, TestError> {
self.data.lock().await.target_heads.get(&para_id.0).cloned().transpose()
Expand Down
Loading

0 comments on commit 788ea16

Please sign in to comment.