feat(consensus): additional sanity checks for the size of proposed blocks (cometbft#1408)

Hardens the checks and tests regarding the size of proposed blocks, namely:

- The byte size of a proposal block `Part` should be constant (`==
types.BlockPartSizeBytes`), except for the last part of a `PartSet` (`<=
types.BlockPartSizeBytes`)
- A valid `Proposal` should not reference a `PartSet` that allows building a
`ProposalBlock` larger than the configured
`ConsensusParams.Block.MaxBytes`. Building such an oversized `ProposalBlock`
would fail in any case, but the proposed changes also invalidate the
associated `Proposal` (see the sketch below).
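
A minimal, self-contained sketch of both rules (hypothetical helper names; the
real checks live in `types/part_set.go` and `internal/consensus/state.go`, and
the 22020096-byte default `MaxBytes` used below is an assumption taken from the
current `DefaultBlockParams`):

```go
package main

import "fmt"

const blockPartSizeBytes = 65536 // mirrors types.BlockPartSizeBytes

// maxParts is the largest part count a Proposal may declare for a block
// limited to maxBytes, i.e. ceil(maxBytes / blockPartSizeBytes).
func maxParts(maxBytes int64) int64 {
	return (maxBytes-1)/blockPartSizeBytes + 1
}

// validPartSize mirrors the Part.ValidateBasic rule: no part may exceed the
// constant part size, and every part except the last must match it exactly.
func validPartSize(index, total int64, size int) bool {
	if size > blockPartSizeBytes {
		return false
	}
	if index < total-1 && size != blockPartSizeBytes {
		return false
	}
	return true
}

func main() {
	fmt.Println(maxParts(22020096))                        // 336 parts for the default 21 MiB limit
	fmt.Println(validPartSize(0, 2, blockPartSizeBytes-1)) // false: inner part too small
	fmt.Println(validPartSize(1, 2, blockPartSizeBytes-1)) // true: a short last part is fine
}
```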

---

#### PR checklist

- [x] Tests written/updated
- [ ] Changelog entry added in `.changelog` (we use
[unclog](https://github.com/informalsystems/unclog) to manage our
changelog)
- [ ] Updated relevant documentation (`docs/` or `spec/`) and code
comments

---------

Co-authored-by: Sergio Mena <sergio@informal.systems>
Co-authored-by: Andy Nogueira <me@andynogueira.dev>
3 people authored Jan 26, 2024
1 parent 635d0b5 commit 28ad4d2
Showing 11 changed files with 92 additions and 25 deletions.
8 changes: 4 additions & 4 deletions crypto/merkle/proof.go
@@ -50,10 +50,10 @@ func (e ErrInvalidProof) Unwrap() error {
// everything. This also affects the generalized proof system as
// well.
type Proof struct {
-Total int64 `json:"total"` // Total number of items.
-Index int64 `json:"index"` // Index of item to prove.
-LeafHash []byte `json:"leaf_hash"` // Hash of item value.
-Aunts [][]byte `json:"aunts"` // Hashes from leaf's sibling to a root's child.
+Total int64 `json:"total"` // Total number of items.
+Index int64 `json:"index"` // Index of item to prove.
+LeafHash []byte `json:"leaf_hash"` // Hash of item value.
+Aunts [][]byte `json:"aunts,omitempty"` // Hashes from leaf's sibling to a root's child.
}

// ProofsFromByteSlices computes inclusion proof for given items.
1 change: 1 addition & 0 deletions internal/consensus/errors.go
@@ -21,6 +21,7 @@ var (
ErrAddingVote = errors.New("error adding vote")
ErrSignatureFoundInPastBlocks = errors.New("found signature from the same key")
ErrPubKeyIsNotSet = errors.New("pubkey is not set. Look for \"Can't get private validator pubkey\" errors")
+ErrProposalTooManyParts = errors.New("proposal block has too many parts")
)

type ErrConsensusMessageNotRecognized struct {
9 changes: 9 additions & 0 deletions internal/consensus/state.go
@@ -1988,6 +1988,15 @@ func (cs *State) defaultSetProposal(proposal *types.Proposal) error {
return ErrInvalidProposalSignature
}

+// Validate the proposed block size, derived from its PartSetHeader
+maxBytes := cs.state.ConsensusParams.Block.MaxBytes
+if maxBytes == -1 {
+maxBytes = int64(types.MaxBlockSizeBytes)
+}
+if int64(proposal.BlockID.PartSetHeader.Total) > (maxBytes-1)/int64(types.BlockPartSizeBytes)+1 {
+return ErrProposalTooManyParts
+}
+
proposal.Signature = p.Signature
cs.Proposal = proposal
// We don't update cs.ProposalBlockParts if it is already set.
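
Note on the check above: when `ConsensusParams.Block.MaxBytes` is `-1` (no explicit limit), the hard cap `types.MaxBlockSizeBytes` is used instead. Assuming the current constants (100 MiB hard cap, 64 KiB block parts), the bound evaluates to `(104857600-1)/65536 + 1 = 1600` parts; the default 22020096-byte `MaxBytes` yields at most `(22020096-1)/65536 + 1 = 336` parts.
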
16 changes: 14 additions & 2 deletions internal/consensus/state_test.go
@@ -264,7 +264,7 @@ func TestStateBadProposal(t *testing.T) {
}

func TestStateOversizedBlock(t *testing.T) {
-const maxBytes = 2000
+const maxBytes = int64(types.BlockPartSizeBytes)

for _, testCase := range []struct {
name string
@@ -310,14 +310,21 @@ func TestStateOversizedBlock(t *testing.T) {
totalBytes += len(part.Bytes)
}

+maxBlockParts := maxBytes / int64(types.BlockPartSizeBytes)
+if maxBytes > maxBlockParts*int64(types.BlockPartSizeBytes) {
+maxBlockParts++
+}
+numBlockParts := int64(propBlockParts.Total())
+
if err := cs1.SetProposalAndBlock(proposal, propBlock, propBlockParts, "some peer"); err != nil {
t.Fatal(err)
}

// start the machine
startTestRound(cs1, height, round)

t.Log("Block Sizes;", "Limit", cs1.state.ConsensusParams.Block.MaxBytes, "Current", totalBytes)
t.Log("Block Sizes;", "Limit", maxBytes, "Current", totalBytes)
t.Log("Proposal Parts;", "Maximum", maxBlockParts, "Current", numBlockParts)

validateHash := propBlock.Hash()
lockedRound := int32(1)
@@ -333,6 +340,11 @@
ensurePrevote(voteCh, height, round)
validatePrevote(t, cs1, round, vss[0], validateHash)

+// Should not accept a Proposal with too many block parts
+if numBlockParts > maxBlockParts {
+require.Nil(t, cs1.Proposal)
+}
+
bps, err := propBlock.MakePartSet(partSize)
require.NoError(t, err)

3 changes: 1 addition & 2 deletions internal/evidence/pool_test.go
@@ -415,8 +415,7 @@ func initializeBlockStore(db dbm.DB, state sm.State, valAddr []byte) (*store.Blo
block := state.MakeBlock(i, test.MakeNTxs(i, 1), lastCommit.ToCommit(), nil, state.Validators.Proposer.Address)
block.Header.Time = defaultEvidenceTime.Add(time.Duration(i) * time.Minute)
block.Header.Version = cmtversion.Consensus{Block: version.BlockProtocol, App: 1}
-const parts = 1
-partSet, err := block.MakePartSet(parts)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion internal/state/execution_test.go
@@ -35,7 +35,7 @@ import (

var (
chainID = "execution_chain"
-testPartSize uint32 = 65536
+testPartSize uint32 = types.BlockPartSizeBytes
)

func TestApplyBlock(t *testing.T) {
2 changes: 1 addition & 1 deletion internal/state/store_test.go
@@ -300,7 +300,7 @@ func fillStore(t *testing.T, height int64, stateStore sm.Store, bs *store.BlockS
require.Equal(t, response1, responses)
}
b1 := state.MakeBlock(state.LastBlockHeight+1, test.MakeNTxs(state.LastBlockHeight+1, 10), new(types.Commit), nil, nil)
-partSet, err := b1.MakePartSet(2)
+partSet, err := b1.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
bs.SaveBlock(b1, partSet, &types.Commit{Height: state.LastBlockHeight + 1})
}
33 changes: 21 additions & 12 deletions internal/store/store_test.go
@@ -1,6 +1,7 @@
package store

import (
"encoding/json"
"fmt"
"os"
"runtime/debug"
@@ -159,10 +160,12 @@ func TestBlockStoreSaveLoadBlock(t *testing.T) {
}
}

-// save a block
-block := state.MakeBlock(bs.Height()+1, nil, new(types.Commit), nil, state.Validators.GetProposer().Address)
-validPartSet, err := block.MakePartSet(2)
+// save a block big enough to have two block parts
+txs := []types.Tx{make([]byte, types.BlockPartSizeBytes)} // TX taking one block part alone
+block := state.MakeBlock(bs.Height()+1, txs, new(types.Commit), nil, state.Validators.GetProposer().Address)
+validPartSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
+require.GreaterOrEqual(t, validPartSet.Total(), uint32(2))
part2 := validPartSet.GetPart(1)

seenCommit := makeTestExtCommit(block.Header.Height, cmttime.Now())
@@ -403,7 +406,7 @@ func TestSaveBlockWithExtendedCommitPanicOnAbsentExtension(t *testing.T) {
block := state.MakeBlock(h, test.MakeNTxs(h, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)

seenCommit := makeTestExtCommit(block.Header.Height, cmttime.Now())
-ps, err := block.MakePartSet(2)
+ps, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
testCase.malleateCommit(seenCommit)
if testCase.shouldPanic {
@@ -443,7 +446,7 @@ func TestLoadBlockExtendedCommit(t *testing.T) {
h := bs.Height() + 1
block := state.MakeBlock(h, test.MakeNTxs(h, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)
seenCommit := makeTestExtCommit(block.Header.Height, cmttime.Now())
-ps, err := block.MakePartSet(2)
+ps, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
if testCase.saveExtended {
bs.SaveBlockWithExtendedCommit(block, ps, seenCommit)
@@ -472,7 +475,7 @@ func TestLoadBaseMeta(t *testing.T) {

for h := int64(1); h <= 10; h++ {
block := state.MakeBlock(h, test.MakeNTxs(h, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)
-partSet, err := block.MakePartSet(2)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
seenCommit := makeTestExtCommit(h, cmttime.Now())
bs.SaveBlockWithExtendedCommit(block, partSet, seenCommit)
@@ -517,7 +520,7 @@ func TestLoadBlockPart(t *testing.T) {

// 3. A good block serialized and saved to the DB should be retrievable
block := state.MakeBlock(height, nil, new(types.Commit), nil, state.Validators.GetProposer().Address)
-partSet, err := block.MakePartSet(2)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
part1 := partSet.GetPart(0)

@@ -528,7 +531,13 @@
gotPart, _, panicErr := doFn(loadPart)
require.NoError(t, panicErr, "an existent and proper block should not panic")
require.Nil(t, res, "a properly saved block should return a proper block")
-require.Equal(t, gotPart.(*types.Part), part1,
+
+// Having to do this because of https://github.com/stretchr/testify/issues/1141
+gotPartJSON, err := json.Marshal(gotPart.(*types.Part))
+require.NoError(t, err)
+part1JSON, err := json.Marshal(part1)
+require.NoError(t, err)
+require.JSONEq(t, string(gotPartJSON), string(part1JSON),
"expecting successful retrieval of previously saved block")
}

@@ -590,7 +599,7 @@ func TestPruningService(t *testing.T) {
// make more than 1000 blocks, to test batch deletions
for h := int64(1); h <= 1500; h++ {
block := state.MakeBlock(h, test.MakeNTxs(h, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)
-partSet, err := block.MakePartSet(2)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
seenCommit := makeTestExtCommit(h, cmttime.Now())
bs.SaveBlockWithExtendedCommit(block, partSet, seenCommit)
@@ -751,7 +760,7 @@ func TestPruneBlocks(t *testing.T) {
// make more than 1000 blocks, to test batch deletions
for h := int64(1); h <= 1500; h++ {
block := state.MakeBlock(h, test.MakeNTxs(h, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)
-partSet, err := block.MakePartSet(2)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
seenCommit := makeTestExtCommit(h, cmttime.Now())
bs.SaveBlockWithExtendedCommit(block, partSet, seenCommit)
@@ -894,7 +903,7 @@ func TestLoadBlockMetaByHash(t *testing.T) {
bs := NewBlockStore(dbm.NewMemDB())

b1 := state.MakeBlock(state.LastBlockHeight+1, test.MakeNTxs(state.LastBlockHeight+1, 10), new(types.Commit), nil, state.Validators.GetProposer().Address)
-partSet, err := b1.MakePartSet(2)
+partSet, err := b1.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
seenCommit := makeTestExtCommit(1, cmttime.Now())
bs.SaveBlock(b1, partSet, seenCommit.ToCommit())
@@ -911,7 +920,7 @@ func TestBlockFetchAtHeight(t *testing.T) {
require.Equal(t, int64(0), bs.Height(), "initially the height should be zero")
block := state.MakeBlock(bs.Height()+1, nil, new(types.Commit), nil, state.Validators.GetProposer().Address)

-partSet, err := block.MakePartSet(2)
+partSet, err := block.MakePartSet(types.BlockPartSizeBytes)
require.NoError(t, err)
seenCommit := makeTestExtCommit(block.Header.Height, cmttime.Now())
bs.SaveBlockWithExtendedCommit(block, partSet, seenCommit)
2 changes: 1 addition & 1 deletion types/event_bus_test.go
@@ -96,7 +96,7 @@ func TestEventBusPublishEventNewBlock(t *testing.T) {
}()

var ps *PartSet
-ps, err = block.MakePartSet(MaxBlockSizeBytes)
+ps, err = block.MakePartSet(BlockPartSizeBytes)
require.NoError(t, err)

err = eventBus.PublishEventNewBlock(EventDataNewBlock{
13 changes: 12 additions & 1 deletion types/part_set.go
@@ -18,6 +18,8 @@ import (
var (
ErrPartSetUnexpectedIndex = errors.New("error part set unexpected index")
ErrPartSetInvalidProof = errors.New("error part set invalid proof")
+ErrPartTooBig = errors.New("error part size too big")
+ErrPartInvalidSize = errors.New("error inner part with invalid size")
)

type Part struct {
@@ -29,7 +31,11 @@ type Part struct {
// ValidateBasic performs basic validation.
func (part *Part) ValidateBasic() error {
if len(part.Bytes) > int(BlockPartSizeBytes) {
return fmt.Errorf("too big: %d bytes, max: %d", len(part.Bytes), BlockPartSizeBytes)
return ErrPartTooBig
}
// All parts except the last one should have the same constant size.
if int64(part.Index) < part.Proof.Total-1 && len(part.Bytes) != int(BlockPartSizeBytes) {
return ErrPartInvalidSize
}
if err := part.Proof.ValidateBasic(); err != nil {
return fmt.Errorf("wrong Proof: %w", err)
@@ -289,6 +295,11 @@ func (ps *PartSet) AddPart(part *Part) (bool, error) {
return false, nil
}

+// The proof should be compatible with the number of parts.
+if part.Proof.Total != int64(ps.total) {
+return false, ErrPartSetInvalidProof
+}
+
// Check hash proof
if part.Proof.Verify(ps.Hash(), part.Bytes) != nil {
return false, ErrPartSetInvalidProof
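
As an illustration of the `ValidateBasic` rules above, the following sketch (assuming the `github.com/cometbft/cometbft/types` package and its `NewPartSetFromData` helper) shows that part sets built the normal way already satisfy them: inner parts are exactly `BlockPartSizeBytes` long and only the last part may be shorter.

```go
package main

import (
	"fmt"

	"github.com/cometbft/cometbft/types"
)

func main() {
	// One full part plus a 100-byte tail, so the set has exactly two parts.
	data := make([]byte, int(types.BlockPartSizeBytes)+100)
	ps := types.NewPartSetFromData(data, types.BlockPartSizeBytes)

	for i := 0; i < int(ps.Total()); i++ {
		part := ps.GetPart(i)
		// Expected: part 0 has BlockPartSizeBytes bytes, part 1 has 100 bytes,
		// and ValidateBasic returns nil for both.
		fmt.Printf("part %d: %d bytes, ValidateBasic: %v\n", i, len(part.Bytes), part.ValidateBasic())
	}
}
```
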
28 changes: 27 additions & 1 deletion types/part_set_test.go
@@ -86,6 +86,22 @@ func TestWrongProof(t *testing.T) {
if added || err == nil {
t.Errorf("expected to fail adding a part with bad bytes.")
}

+// Test adding a part with wrong proof index.
+part = partSet.GetPart(2)
+part.Proof.Index = 1
+added, err = partSet2.AddPart(part)
+if added || err == nil {
+t.Errorf("expected to fail adding a part with bad proof index.")
+}
+
+// Test adding a part with wrong proof total.
+part = partSet.GetPart(3)
+part.Proof.Total = int64(partSet.Total() - 1)
+added, err = partSet2.AddPart(part)
+if added || err == nil {
+t.Errorf("expected to fail adding a part with bad proof total.")
+}
}

func TestPartSetHeaderValidateBasic(t *testing.T) {
@@ -117,9 +133,19 @@ func TestPartValidateBasic(t *testing.T) {
}{
{"Good Part", func(pt *Part) {}, false},
{"Too big part", func(pt *Part) { pt.Bytes = make([]byte, BlockPartSizeBytes+1) }, true},
{"Good small last part", func(pt *Part) {
pt.Index = 1
pt.Bytes = make([]byte, BlockPartSizeBytes-1)
pt.Proof.Total = 2
}, false},
{"Too small inner part", func(pt *Part) {
pt.Index = 0
pt.Bytes = make([]byte, BlockPartSizeBytes-1)
pt.Proof.Total = 2
}, true},
{"Too big proof", func(pt *Part) {
pt.Proof = merkle.Proof{
-Total: 1,
+Total: 2,
Index: 1,
LeafHash: make([]byte, 1024*1024),
}