fix: make test_bad_chunk_mask work with stateless validation
This test didn't work with stateless validation because
it didn't propagate chunk state witnesses and endorsements as needed.

I modified the test to use `TestEnv`, which allowed me to use the
existing functionality for propagating all the partial chunk requests,
state witnesses and endorsements.
This changed the assignment of chunk and block producers.
Previously one of the clients was a chunk producer and the
other a block producer, with their roles swapped on each block.
Now the roles are assigned based on the logic in `EpochManager`.
IMO this doesn't affect the test: at the crucial moment, when the
bad chunk mask is tested, the chunk producer is still different from
the block producer, just like in the original test (see the sketch
below).
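
For context, this is roughly how the role assignment changes (a
condensed before/after sketch of the loop body from the diff below,
not new code):

    // Before: roles alternated with block height parity.
    let block_producer = (height % 2) as usize;
    let chunk_producer = ((height + 1) % 2) as usize;

    // After: roles are queried from EpochManager. The test never leaves
    // the first epoch, so the default EpochId is valid at every height.
    let first_epoch_id = &EpochId::default();
    let chunk_producer =
        env.clients[0].epoch_manager.get_chunk_producer(&first_epoch_id, height, 0).unwrap();
    let block_producer =
        env.clients[0].epoch_manager.get_block_producer(&first_epoch_id, height).unwrap();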

The test needs to produce a single chunk on shard 0 to achieve the desired
chunk mask, so I couldn't just use the default way of generating chunks during
block processing. I had to implement a function (`produce_one_chunk`) that
manually produces the required chunk; a sketch of the resulting per-height
flow follows.
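
Condensed, each height now goes through these steps (a sketch assembled
from the diff below; `env` is the `TestEnv` and `produce_one_chunk` is
the helper added in this commit):

    // Manually produce the single chunk on shard 0; shard 1's chunk stays missing.
    let shard_chunk = env.client(&chunk_producer).produce_one_chunk(height, 0);
    // Reuse TestEnv's existing machinery to distribute partial chunks,
    // state witnesses and endorsements to all clients.
    env.process_partial_encoded_chunks();
    for i in 0..env.clients.len() {
        env.process_shards_manager_responses(i);
    }
    env.propagate_chunk_state_witnesses_and_endorsements(false);
    // Produce the block; its chunk_mask should be [true, false].
    let mut block = env.client(&block_producer).produce_block(height).unwrap().unwrap();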

Refs: near#10506
jancionear committed Feb 7, 2024
1 parent 17f85c1 commit 3c1f95d
Showing 2 changed files with 81 additions and 61 deletions.
chain/client/src/test_utils/client.rs: 35 additions & 1 deletion
@@ -18,7 +18,7 @@ use near_network::types::HighestHeightPeerInfo;
 use near_primitives::block::Block;
 use near_primitives::hash::CryptoHash;
 use near_primitives::merkle::{merklize, PartialMerkleTree};
-use near_primitives::sharding::{EncodedShardChunk, ReedSolomonWrapper};
+use near_primitives::sharding::{EncodedShardChunk, ReedSolomonWrapper, ShardChunk};
 use near_primitives::stateless_validation::ChunkEndorsement;
 use near_primitives::transaction::SignedTransaction;
 use near_primitives::types::{BlockHeight, ShardId};
@@ -82,6 +82,40 @@ impl Client {
         }
         vec![]
     }
+
+    /// Manually produce a single chunk on the given shard and send out the corresponding network messages
+    pub fn produce_one_chunk(&mut self, height: BlockHeight, shard_id: ShardId) -> ShardChunk {
+        let ProduceChunkResult {
+            chunk: encoded_chunk,
+            encoded_chunk_parts_paths: merkle_paths,
+            receipts,
+            transactions_storage_proof,
+        } = create_chunk_on_height_for_shard(self, height, shard_id);
+        let shard_chunk = self
+            .persist_and_distribute_encoded_chunk(
+                encoded_chunk,
+                merkle_paths,
+                receipts,
+                self.validator_signer.as_ref().unwrap().validator_id().clone(),
+            )
+            .unwrap();
+        let prev_block = self.chain.get_block(shard_chunk.prev_block()).unwrap();
+        let prev_chunk_header = Chain::get_prev_chunk_header(
+            self.epoch_manager.as_ref(),
+            &prev_block,
+            shard_chunk.shard_id(),
+        )
+        .unwrap();
+        self.send_chunk_state_witness_to_chunk_validators(
+            &self.epoch_manager.get_epoch_id_from_prev_block(shard_chunk.prev_block()).unwrap(),
+            prev_block.header(),
+            &prev_chunk_header,
+            &shard_chunk,
+            transactions_storage_proof,
+        )
+        .unwrap();
+        shard_chunk
+    }
 }
 
 fn create_chunk_on_height_for_shard(
integration-tests/src/tests/client/process_blocks.rs: 46 additions & 60 deletions
@@ -24,8 +24,8 @@ use near_client::test_utils::{
     setup_mock_all_validators, TestEnv,
 };
 use near_client::{
-    BlockApproval, BlockResponse, Client, GetBlockWithMerkleTree, ProcessTxResponse,
-    ProduceChunkResult, SetNetworkInfo,
+    BlockApproval, BlockResponse, GetBlockWithMerkleTree, ProcessTxResponse, ProduceChunkResult,
+    SetNetworkInfo,
 };
 use near_crypto::{InMemorySigner, KeyType, PublicKey, Signature, Signer};
 use near_network::test_utils::{wait_or_panic, MockPeerManagerAdapter};
@@ -1256,58 +1256,40 @@ fn test_bad_orphan() {
 
 #[test]
 fn test_bad_chunk_mask() {
-    // TODO(#10506): Fix test to handle stateless validation
-    if checked_feature!("stable", StatelessValidationV0, PROTOCOL_VERSION) {
-        return;
-    }
-
     init_test_logger();
-    let chain_genesis = ChainGenesis::test();
-    let validators = vec!["test0".parse().unwrap(), "test1".parse().unwrap()];
-    let mut clients: Vec<Client> = validators
-        .iter()
-        .map(|account_id| {
-            let vs = ValidatorSchedule::new()
-                .num_shards(2)
-                .block_producers_per_epoch(vec![validators.clone()]);
-            setup_client_with_synchronous_shards_manager(
-                create_test_store(),
-                vs,
-                Some(account_id.clone()),
-                false,
-                Arc::new(MockPeerManagerAdapter::default()).into(),
-                MockClientAdapterForShardsManager::default().into_sender(),
-                chain_genesis.clone(),
-                TEST_SEED,
-                false,
-                true,
-            )
-        })
-        .collect();
-    for height in 1..5 {
-        let block_producer = (height % 2) as usize;
-        let chunk_producer = ((height + 1) % 2) as usize;
-
-        let ProduceChunkResult {
-            chunk: encoded_chunk,
-            encoded_chunk_parts_paths: merkle_paths,
-            receipts,
-            ..
-        } = create_chunk_on_height(&mut clients[chunk_producer], height);
-        for client in clients.iter_mut() {
-            client
-                .persist_and_distribute_encoded_chunk(
-                    encoded_chunk.clone(),
-                    merkle_paths.clone(),
-                    receipts.clone(),
-                    client.validator_signer.as_ref().unwrap().validator_id().clone(),
-                )
-                .unwrap();
+    // Create a TestEnv with two shards and two validators who track both shards
+    let accounts = vec!["test0".parse().unwrap(), "test1".parse().unwrap()];
+    let num_validators: u64 = accounts.len().try_into().unwrap();
+    let genesis = Genesis::test_sharded(accounts.clone(), num_validators, vec![num_validators; 2]);
+    let mut env = TestEnv::builder(ChainGenesis::new(&genesis))
+        .clients(accounts)
+        .real_epoch_managers(&genesis.config)
+        .nightshade_runtimes(&genesis)
+        .track_all_shards()
+        .build();
+
+    // Generate 4 blocks
+    for height in 1..5 {
+        // The test never goes past the first epoch, so EpochId(11111...) can be used for all calculations
+        let first_epoch_id = &EpochId::default();
+        let chunk_producer =
+            env.clients[0].epoch_manager.get_chunk_producer(&first_epoch_id, height, 0).unwrap();
+        let block_producer =
+            env.clients[0].epoch_manager.get_block_producer(&first_epoch_id, height).unwrap();
+
+        // Manually produce a single chunk on shard 0, chunk for 1 is always missing.
+        let shard_chunk = env.client(&chunk_producer).produce_one_chunk(height, 0);
+        env.process_partial_encoded_chunks();
+        for i in 0..env.clients.len() {
+            env.process_shards_manager_responses(i);
         }
+        env.propagate_chunk_state_witnesses_and_endorsements(false);
 
-        let mut block = clients[block_producer].produce_block(height).unwrap().unwrap();
+        // Produce a block with a chunk on shard 0. On shard 1 the chunk is missing. chunk_mask should be [true, false]
+        let mut block = env.client(&block_producer).produce_block(height).unwrap().unwrap();
         {
-            let mut chunk_header = encoded_chunk.cloned_header();
+            let mut chunk_header = shard_chunk.cloned_header();
             *chunk_header.height_included_mut() = height;
             let mut chunk_headers: Vec<_> = block.chunks().iter().cloned().collect();
             chunk_headers[0] = chunk_header;
@@ -1323,21 +1305,25 @@ fn test_bad_chunk_mask() {
         block.mut_header().get_mut().inner_rest.chunk_mask = vec![true, false];
         let mess_with_chunk_mask = height == 4;
         if mess_with_chunk_mask {
+            // On height 4 set the chunk_mask to an invalid value.
             block.mut_header().get_mut().inner_rest.chunk_mask = vec![false, true];
+            // The original test made sure that block_producer is different from chunk_producer,
+            // so let's make sure that this is still the case using an assert.
+            assert_ne!(block_producer, chunk_producer);
         }
         block
             .mut_header()
-            .resign(&*clients[block_producer].validator_signer.as_ref().unwrap().clone());
-        let res1 = clients[chunk_producer]
-            .process_block_test_no_produce_chunk(block.clone().into(), Provenance::NONE);
-        let res2 = clients[block_producer]
-            .process_block_test_no_produce_chunk(block.clone().into(), Provenance::NONE);
-        if !mess_with_chunk_mask {
-            res1.unwrap();
-            res2.unwrap();
-        } else {
-            res1.unwrap_err();
-            res2.unwrap_err();
+            .resign(&*env.client(&block_producer).validator_signer.as_ref().unwrap().clone());
+
+        for client in env.clients.iter_mut() {
+            let res = client
+                .process_block_test_no_produce_chunk(block.clone().into(), Provenance::NONE);
+            if !mess_with_chunk_mask {
+                res.unwrap();
+            } else {
+                // Processing a block with an invalid chunk_mask should fail.
+                res.unwrap_err();
+            }
         }
     }
 }