chore: handle PayForBlobNamespaceID in `merge` (#1298)

Closes #1285

## Description

1. Handle PayForBlobNamespaceID in `merge`
2. Extract `merge` to a new file
3. Add a unit test that uses sample block data
Showing 9 changed files with 421 additions and 352 deletions.
@@ -0,0 +1,72 @@
package shares

import (
	"bytes"

	"github.com/celestiaorg/celestia-app/pkg/appconsts"
	"github.com/celestiaorg/rsmt2d"
	coretypes "github.com/tendermint/tendermint/types"
)

// merge extracts block data from an extended data square.
// TODO: export this function
func merge(eds *rsmt2d.ExtendedDataSquare) (coretypes.Data, error) {
	squareSize := eds.Width() / 2

	// sort block data shares by namespace
	var (
		sortedTxShares    [][]byte
		sortedPfbTxShares [][]byte
		sortedBlobShares  [][]byte
	)

	// iterate over each row index
	for x := uint(0); x < squareSize; x++ {
		// iterate over each share in the original data square
		row := eds.Row(x)

		for _, share := range row[:squareSize] {
			// sort the share into a bucket based on its namespace
			nid := share[:appconsts.NamespaceSize]
			switch {
			case bytes.Equal(appconsts.TxNamespaceID, nid):
				sortedTxShares = append(sortedTxShares, share)
			case bytes.Equal(appconsts.PayForBlobNamespaceID, nid):
				sortedPfbTxShares = append(sortedPfbTxShares, share)
			case bytes.Equal(appconsts.TailPaddingNamespaceID, nid):
				continue

			// ignore unused but reserved namespaces
			case bytes.Compare(nid, appconsts.MaxReservedNamespace) < 1:
				continue

			// every other namespace ID should belong to a blob
			default:
				sortedBlobShares = append(sortedBlobShares, share)
			}
		}
	}

	// pass the raw share data to their respective parsers
	ordinaryTxs, err := ParseTxs(sortedTxShares)
	if err != nil {
		return coretypes.Data{}, err
	}
	pfbTxs, err := ParseTxs(sortedPfbTxShares)
	if err != nil {
		return coretypes.Data{}, err
	}
	txs := append(ordinaryTxs, pfbTxs...)

	blobs, err := ParseBlobs(sortedBlobShares)
	if err != nil {
		return coretypes.Data{}, err
	}

	// TODO: the Data returned below does not have the correct data.hash populated.
	return coretypes.Data{
		Txs:        txs,
		Blobs:      blobs,
		SquareSize: uint64(squareSize),
	}, nil
}
@@ -0,0 +1,110 @@
package shares

import (
	"context"
	_ "embed"
	"encoding/json"
	"testing"
	"time"

	"github.com/celestiaorg/celestia-app/pkg/appconsts"
	"github.com/celestiaorg/celestia-app/testutil/testfactory"
	"github.com/celestiaorg/rsmt2d"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
	coretypes "github.com/tendermint/tendermint/types"
)

func TestFuzz_merge(t *testing.T) {
	t.Skip()
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()
	for {
		select {
		case <-ctx.Done():
			return
		default:
			Test_merge_randomData(t)
		}
	}
}

func Test_merge_randomData(t *testing.T) {
	type test struct {
		name      string
		txCount   int
		blobCount int
		maxSize   int // max size of each tx or blob
	}

	tests := []test{
		{"one of each random small size", 1, 1, 40},
		{"one of each random large size", 1, 1, 400},
		{"many of each random small size", 10, 10, 40},
		{"many of each random large size", 10, 10, 400},
		{"only transactions", 10, 0, 400},
		{"only blobs", 0, 10, 400},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			data := generateRandomBlockData(
				tc.txCount,
				tc.blobCount,
				tc.maxSize,
			)

			shares, err := Split(data, false)
			require.NoError(t, err)

			eds, err := rsmt2d.ComputeExtendedDataSquare(ToBytes(shares), appconsts.DefaultCodec(), rsmt2d.NewDefaultTree)
			assert.NoError(t, err)

			got, err := merge(eds)
			assert.NoError(t, err)
			assert.Equal(t, data, got)
		})
	}
}

func Test_merge_sampleBlock(t *testing.T) {
	var pb tmproto.Block
	err := json.Unmarshal([]byte(sampleBlock), &pb)
	require.NoError(t, err)

	b, err := coretypes.BlockFromProto(&pb)
	require.NoError(t, err)

	shares, err := Split(b.Data, false)
	require.NoError(t, err)

	eds, err := rsmt2d.ComputeExtendedDataSquare(ToBytes(shares), appconsts.DefaultCodec(), rsmt2d.NewDefaultTree)
	assert.NoError(t, err)

	got, err := merge(eds)
	assert.NoError(t, err)

	// TODO: the assertions below are a hack because the data returned by merge does not
	// contain the same hash as the original block. Ideally this test would verify:
	//
	// assert.Equal(t, b.Data, got)
	//
	// Instead this test verifies that all public fields of Data are identical.
	assert.Equal(t, b.Data.Txs, got.Txs)
	assert.Equal(t, b.Data.Blobs, got.Blobs)
	assert.Equal(t, b.Data.SquareSize, got.SquareSize)
}

// generateRandomBlockData returns randomly generated block data for testing purposes
func generateRandomBlockData(txCount, blobCount, maxSize int) (data coretypes.Data) {
	data.Txs = testfactory.GenerateRandomlySizedTxs(txCount, maxSize)
	data.Blobs = testfactory.GenerateRandomlySizedBlobs(blobCount, maxSize)
	data.SquareSize = appconsts.DefaultMaxSquareSize
	return data
}

// this is a sample block
//
//go:embed "testdata/sample-block.json"
var sampleBlock string
@@ -0,0 +1,78 @@
package shares

import (
	"bytes"
	"fmt"

	"github.com/celestiaorg/celestia-app/pkg/appconsts"
	coretypes "github.com/tendermint/tendermint/types"
)

// ParseTxs collects all of the transactions from the shares provided
func ParseTxs(shares [][]byte) (coretypes.Txs, error) {
	// parse the shares
	rawTxs, err := parseCompactShares(shares, appconsts.SupportedShareVersions)
	if err != nil {
		return nil, err
	}

	// convert to the Tx type
	txs := make(coretypes.Txs, len(rawTxs))
	for i := 0; i < len(txs); i++ {
		txs[i] = coretypes.Tx(rawTxs[i])
	}

	return txs, nil
}

// ParseBlobs collects all blobs from the shares provided
func ParseBlobs(shares [][]byte) ([]coretypes.Blob, error) {
	blobList, err := parseSparseShares(shares, appconsts.SupportedShareVersions)
	if err != nil {
		return []coretypes.Blob{}, err
	}

	return blobList, nil
}

// ParseShares parses rawShares into a list of ShareSequences, grouping
// consecutive shares that belong to the same namespace-delimited sequence.
func ParseShares(rawShares [][]byte) ([]ShareSequence, error) {
	sequences := []ShareSequence{}
	currentSequence := ShareSequence{}

	for _, rawShare := range rawShares {
		share, err := NewShare(rawShare)
		if err != nil {
			return sequences, err
		}
		isStart, err := share.IsSequenceStart()
		if err != nil {
			return sequences, err
		}
		if isStart {
			if len(currentSequence.Shares) > 0 {
				sequences = append(sequences, currentSequence)
			}
			currentSequence = ShareSequence{
				Shares:      []Share{share},
				NamespaceID: share.NamespaceID(),
			}
		} else {
			if !bytes.Equal(currentSequence.NamespaceID, share.NamespaceID()) {
				return sequences, fmt.Errorf("share sequence %v has inconsistent namespace IDs with share %v", currentSequence, share)
			}
			currentSequence.Shares = append(currentSequence.Shares, share)
		}
	}

	if len(currentSequence.Shares) > 0 {
		sequences = append(sequences, currentSequence)
	}

	for _, sequence := range sequences {
		if err := sequence.validSequenceLen(); err != nil {
			return sequences, err
		}
	}

	return sequences, nil
}
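
ParseShares is not exercised by merge above, so as a rough illustration, here is a minimal sketch of how it could be used to group the shares of some block data into per-namespace sequences. The helper name groupByNamespace is hypothetical and not part of this change; it assumes the package-level Split and ToBytes helpers used in the tests above and the imports already present in this file.

// groupByNamespace is a hypothetical helper (not part of this change) that
// splits block data into shares and reports how many shares each
// namespace-delimited sequence contains.
func groupByNamespace(data coretypes.Data) error {
	// split the block data into shares (the boolean flag mirrors its use in the tests above)
	shares, err := Split(data, false)
	if err != nil {
		return err
	}
	// group the raw share bytes into sequences, one per namespace-delimited unit
	sequences, err := ParseShares(ToBytes(shares))
	if err != nil {
		return err
	}
	for _, seq := range sequences {
		fmt.Printf("namespace %x spans %d share(s)\n", seq.NamespaceID, len(seq.Shares))
	}
	return nil
}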