feat(coordinator): add blob bytes in batch proving task #1485

Merged
8 changes: 4 additions & 4 deletions common/types/message/message.go
@@ -4,7 +4,6 @@ import (
"errors"
"fmt"

"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/go-ethereum/common"
)

@@ -52,9 +51,10 @@ type ChunkTaskDetail struct {

// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader string `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
}

// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
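Note on the new wire format: BatchHeader switches from a typed *codecv3.DABatch to a JSON-encoded string, and BlobBytes carries the raw blob payload. Below is a minimal sketch of how the two new fields serialize, using a local stand-in struct rather than the real common/types/message package (its import path is not shown in this diff):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// batchTaskDetail mirrors only the changed fields of BatchTaskDetail from this
// PR; it is a local stand-in for illustration, not the actual message type.
type batchTaskDetail struct {
	BatchHeader string `json:"batch_header"`
	BlobBytes   []byte `json:"blob_bytes"`
}

func main() {
	detail := batchTaskDetail{
		BatchHeader: `{"version":3}`,          // placeholder: JSON-encoded DABatch
		BlobBytes:   []byte{0x01, 0x02, 0x03}, // placeholder blob payload
	}
	out, err := json.Marshal(&detail)
	if err != nil {
		panic(err)
	}
	// []byte marshals to base64, so blob_bytes arrives as "AQID" here.
	fmt.Println(string(out))
}
```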
2 changes: 1 addition & 1 deletion coordinator/go.mod
@@ -7,6 +7,7 @@ require (
github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0
github.com/mitchellh/mapstructure v1.5.0
github.com/scroll-tech/da-codec v0.0.0-20240818122843-6901956c9910
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.9.0
@@ -45,7 +46,6 @@ require (
require (
github.com/google/uuid v1.6.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
)

require (
4 changes: 2 additions & 2 deletions coordinator/go.sum
@@ -173,8 +173,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240818122843-6901956c9910 h1:c2siFeESs8MgppUAoatnkbnQMoFTq2DPT0TEdZFPbok=
github.com/scroll-tech/da-codec v0.0.0-20240818122843-6901956c9910/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
86 changes: 75 additions & 11 deletions coordinator/internal/logic/provertask/batch_prover_task.go
@@ -9,7 +9,9 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
@@ -208,17 +210,9 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
chunkInfos = append(chunkInfos, &chunkInfo)
}

taskDetail := message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}

if hardForkName == "darwin" {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail, err := bp.getBatchTaskDetail(ctx, hardForkName, batch, chunks, chunkInfos, chunkProofs)
if err != nil {
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
}

chunkProofsBytes, err := json.Marshal(taskDetail)
@@ -241,3 +235,73 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err)
}
}

func (bp *BatchProverTask) getBatchTaskDetail(ctx context.Context, hardForkName string, dbBatch *orm.Batch, dbChunks []*orm.Chunk, chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}

if hardForkName != "darwin" {
return taskDetail, nil
}

chunks := make([]*encoding.Chunk, len(dbChunks))
for i, c := range dbChunks {
blocks, getErr := bp.blockOrm.GetL2BlocksInRange(ctx, c.StartBlockNumber, c.EndBlockNumber)
if getErr != nil {
return nil, fmt.Errorf("failed to get blocks in range for batch %d, chunk %d, (start: %d, end: %d): %w", dbBatch.Index, c.Index, c.StartBlockNumber, c.EndBlockNumber, getErr)
}
chunks[i] = &encoding.Chunk{Blocks: blocks}
}

dbParentBatch, getErr := bp.batchOrm.GetBatchByIndex(ctx, dbBatch.Index-1)
if getErr != nil {
return nil, fmt.Errorf("failed to get parent batch header for batch %d: %w", dbBatch.Index, getErr)
}

batchEncoding := &encoding.Batch{
Index: dbBatch.Index,
TotalL1MessagePoppedBefore: dbChunks[0].TotalL1MessagesPoppedBefore,
ParentBatchHash: common.HexToHash(dbParentBatch.Hash),
Chunks: chunks,
}

if !dbBatch.EnableEncode {
daBatch, createErr := codecv3.NewDABatch(batchEncoding)
if createErr != nil {
return nil, fmt.Errorf("failed to create DA batch (v3) for batch %d: %w", dbBatch.Index, createErr)
}
taskDetail.BlobBytes = daBatch.Blob()[:]

batchHeader, decodeErr := codecv3.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v3) for batch %d: %w", dbBatch.Index, decodeErr)
}

jsonData, marshalErr := json.Marshal(batchHeader)
if marshalErr != nil {
return nil, fmt.Errorf("failed to marshal batch header (v3) for batch %d: %w", dbBatch.Index, marshalErr)
}
taskDetail.BatchHeader = string(jsonData)
} else {
daBatch, createErr := codecv4.NewDABatch(batchEncoding, dbBatch.EnableEncode)
if createErr != nil {
return nil, fmt.Errorf("failed to create DA batch (v4) for batch %d: %w", dbBatch.Index, createErr)
}
taskDetail.BlobBytes = daBatch.Blob()[:]

batchHeader, decodeErr := codecv4.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v4) for batch %d: %w", dbBatch.Index, decodeErr)
}

jsonData, marshalErr := json.Marshal(batchHeader)
if marshalErr != nil {
return nil, fmt.Errorf("failed to marshal batch header (v4) for batch %d: %w", dbBatch.Index, marshalErr)
}
taskDetail.BatchHeader = string(jsonData)
}

return taskDetail, nil
}
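For completeness, here is a hypothetical sketch of how a prover-side consumer might unpack the payload getBatchTaskDetail now produces; the stand-in struct, the helper name, and the 4096 × 32 = 131072-byte blob-length check are illustrative, not part of this PR:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// taskDetail is a stand-in for message.BatchTaskDetail, reduced to the fields
// added in this PR.
type taskDetail struct {
	BatchHeader string `json:"batch_header"`
	BlobBytes   []byte `json:"blob_bytes"`
}

// parseBatchTask is a hypothetical prover-side helper: it unmarshals the task
// payload and checks that the blob has the full EIP-4844 size of
// 4096 field elements * 32 bytes.
func parseBatchTask(payload []byte) (*taskDetail, error) {
	var task taskDetail
	if err := json.Unmarshal(payload, &task); err != nil {
		return nil, fmt.Errorf("failed to unmarshal batch task detail: %w", err)
	}
	if len(task.BlobBytes) != 4096*32 {
		return nil, fmt.Errorf("unexpected blob size: %d bytes", len(task.BlobBytes))
	}
	return &task, nil
}

func main() {
	payload := []byte(`{"batch_header":"{}","blob_bytes":"AQID"}`) // toy payload
	if _, err := parseBatchTask(payload); err != nil {
		fmt.Println(err) // unexpected blob size: 3 bytes
	}
}
```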
14 changes: 14 additions & 0 deletions coordinator/internal/orm/batch.go
@@ -31,6 +31,7 @@ type Batch struct {
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
EnableEncode bool `json:"enable_encode" gorm:"column:enable_encode"`

// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -225,6 +226,19 @@ func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) (
return batches, nil
}

// GetBatchByIndex retrieves the batch by the given index.
func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("index = ?", index)

var batch Batch
if err := db.First(&batch).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchByIndex error: %w, index: %v", err, index)
}
return &batch, nil
}

// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {
49 changes: 49 additions & 0 deletions coordinator/internal/orm/l2_block.go
@@ -87,6 +87,55 @@ func (o *L2Block) GetL2BlockByNumber(ctx context.Context, blockNumber uint64) (*
return &l2Block, nil
}

// GetL2BlocksInRange retrieves the L2 blocks within the specified range (inclusive).
// The range is closed, i.e., it includes both start and end block numbers.
// The returned blocks are sorted in ascending order by their block number.
func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint64, endBlockNumber uint64) ([]*encoding.Block, error) {
if startBlockNumber > endBlockNumber {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: start block number should be less than or equal to end block number, start block: %v, end block: %v", startBlockNumber, endBlockNumber)
}

db := o.db.WithContext(ctx)
db = db.Model(&L2Block{})
db = db.Select("header, transactions, withdraw_root, row_consumption")
db = db.Where("number >= ? AND number <= ?", startBlockNumber, endBlockNumber)
db = db.Order("number ASC")

var l2Blocks []L2Block
if err := db.Find(&l2Blocks).Error; err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}

// sanity check
if uint64(len(l2Blocks)) != endBlockNumber-startBlockNumber+1 {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: unexpected number of results, expected: %v, got: %v", endBlockNumber-startBlockNumber+1, len(l2Blocks))
}

var blocks []*encoding.Block
for _, v := range l2Blocks {
var block encoding.Block

if err := json.Unmarshal([]byte(v.Transactions), &block.Transactions); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}

block.Header = &gethTypes.Header{}
if err := json.Unmarshal([]byte(v.Header), block.Header); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}

block.WithdrawRoot = common.HexToHash(v.WithdrawRoot)

if err := json.Unmarshal([]byte(v.RowConsumption), &block.RowConsumption); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}

blocks = append(blocks, &block)
}

return blocks, nil
}

// InsertL2Blocks inserts l2 blocks into the "l2_block" table.
// for unit test
func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error {
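A small stand-alone sketch of the two guards this query relies on: the range must be ordered, and because both ends are inclusive, a query over [start, end] must return exactly end-start+1 rows. The helper name and sample values below are illustrative:

```go
package main

import "fmt"

// checkRange mirrors the guards in GetL2BlocksInRange: the range must be
// ordered (start <= end), and since it is inclusive on both ends, the query
// must return exactly end-start+1 rows or a block is missing.
func checkRange(start, end, got uint64) error {
	if start > end {
		return fmt.Errorf("start block %d is greater than end block %d", start, end)
	}
	if want := end - start + 1; got != want {
		return fmt.Errorf("unexpected number of results, expected: %d, got: %d", want, got)
	}
	return nil
}

func main() {
	fmt.Println(checkRange(100, 102, 2)) // a gap in l2_block trips the count check
	fmt.Println(checkRange(100, 102, 3)) // <nil>
}
```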