Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: follower node sync from DA #631

Closed
wants to merge 73 commits into from
Closed
Changes from 1 commit
Commits
Show all changes
73 commits
Select commit Hold shift + click to select a range
4fe6c3a
implement l1rpcfetcher and prepare for implementing da_syncer
NazariiDenha Feb 5, 2024
25bc860
finish da_syncer, add db access, small fixes
NazariiDenha Feb 13, 2024
deed82a
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Feb 13, 2024
635f744
fix lint
NazariiDenha Feb 13, 2024
69d6c39
address comments
NazariiDenha Feb 19, 2024
8bc5a6b
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Mar 18, 2024
2bef690
poc
NazariiDenha Apr 9, 2024
b6fc6dd
update poc, preprocess blocks before inserting to generate missing he…
NazariiDenha Apr 29, 2024
794c251
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Apr 29, 2024
5f11287
refactor code to new design with pipeline
NazariiDenha May 17, 2024
885bd3e
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha May 17, 2024
6b9e837
adapt for codecv0 types
NazariiDenha May 20, 2024
c6201c3
prepare for blob data source
NazariiDenha May 21, 2024
27a4e51
implement blob fetcher
NazariiDenha May 24, 2024
6991da0
implement blob fetcher
NazariiDenha May 24, 2024
e483219
fixes after testing and add synced l1 height to db
NazariiDenha Jun 7, 2024
48dd5d8
Merge branch 'feat/sync-directly-from-da' of github.com:scroll-tech/g…
NazariiDenha Jun 7, 2024
eb377fe
fixes and improvements after testing
NazariiDenha Jun 11, 2024
5a48000
fixes and improvements after testing
NazariiDenha Jun 11, 2024
01e5f9c
Merge branch 'feat/sync-directly-from-da' of github.com:scroll-tech/g…
NazariiDenha Jun 11, 2024
1a84506
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Jun 11, 2024
fd6d899
update codec version
NazariiDenha Jun 11, 2024
9b929ff
lint
NazariiDenha Jun 11, 2024
9b13959
update codec dependency
NazariiDenha Jun 17, 2024
20ca22d
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Jun 17, 2024
bf1d02e
goimports
NazariiDenha Jun 17, 2024
bc9b360
small fixes
NazariiDenha Jun 18, 2024
e55416e
fix lint, turn off handler, worker, miner during da syncing
NazariiDenha Jun 18, 2024
1ff0973
remove comments
NazariiDenha Jul 2, 2024
b3726ff
support blocknative
NazariiDenha Jul 8, 2024
6e0ccdb
fix
NazariiDenha Jul 9, 2024
cc65d00
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Jul 9, 2024
49c3c15
fix lint
NazariiDenha Jul 9, 2024
e825a42
fix bug with decoding parent batch
NazariiDenha Jul 14, 2024
750c615
support codec v3
NazariiDenha Jul 15, 2024
e3ced13
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Jul 24, 2024
cda4058
address comments
NazariiDenha Jul 24, 2024
f3a1c5d
fix: nil pointer when enabling --da.sync=true
jonastheis Jul 24, 2024
2931099
fix: block not being inserted due to Clique verification
jonastheis Jul 25, 2024
1c36c2c
refactor: compose DA types for more cohesion, maintainability and cod…
jonastheis Jul 30, 2024
e587c98
feat: execute blocks only once
jonastheis Jul 26, 2024
9dc17cb
refactor: introduce partial header and partial block for data from DA…
jonastheis Jul 26, 2024
dbdbc88
minor cleanup
jonastheis Jul 26, 2024
26dbf42
feat: fix issue with not specifying difficulty
jonastheis Jul 29, 2024
0f8e35c
refactor: compose DA types for more cohesion, maintainability and cod…
jonastheis Jul 25, 2024
912af16
feat: implement generic min heap
jonastheis Jul 29, 2024
bdb575d
feat: use generic min heap instead of map in BatchQueue
jonastheis Jul 29, 2024
362d160
fix compile errors after rebase
jonastheis Jul 30, 2024
329dd54
disable all p2p networking
NazariiDenha Aug 4, 2024
15ec7c4
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Aug 4, 2024
d958069
chore: auto version bump [bot]
NazariiDenha Aug 4, 2024
69e1a9e
v3 finalization
NazariiDenha Aug 4, 2024
0fd2a08
feat: add shrinking map that shrinks itself after a certain number of…
jonastheis Aug 6, 2024
3f68f2c
feat: use shrinking map in batch queue to delete specific elements fr…
jonastheis Aug 6, 2024
53bdb18
feat: execute blocks only once
jonastheis Jul 26, 2024
5243d59
refactor: introduce partial header and partial block for data from DA…
jonastheis Jul 26, 2024
7018b5b
minor cleanup
jonastheis Jul 26, 2024
fe33d83
feat: fix issue with not specifying difficulty
jonastheis Jul 29, 2024
d279e83
Merge remote-tracking branch 'origin/feat/sync-directly-from-da' into…
jonastheis Aug 6, 2024
d3ca47e
feat: implement simple pipeline reset (#941)
jonastheis Aug 6, 2024
19ed11d
feat: remove changes to Clique as we're not verifying signatures when…
jonastheis Aug 6, 2024
a6461d5
feat: only request finalized block number when necessary
jonastheis Aug 6, 2024
85bbc98
minor cleanup and comments
jonastheis Aug 7, 2024
485c229
cycle over list of blob clients (#960)
NazariiDenha Aug 7, 2024
f8eadfe
Merge branch 'develop' into feat/sync-directly-from-da
0xmountaintop Aug 8, 2024
399dcfc
feat: introduce custom errors and mark RPC related errors as temporar…
jonastheis Aug 13, 2024
9e1769f
fix blob_client_list
NazariiDenha Aug 15, 2024
0649954
chore: auto version bump [bot]
NazariiDenha Aug 15, 2024
0b2fe3b
feat: enable prefetching in BuildAndWriteBlock
jonastheis Aug 16, 2024
9e3c838
(follower_node)support beacon node client as blob provider (#988)
NazariiDenha Aug 19, 2024
787c955
update codec version
NazariiDenha Aug 26, 2024
dcd9c5c
Merge branch 'develop' of github.com:scroll-tech/go-ethereum into fea…
NazariiDenha Aug 26, 2024
4a66bf3
support codec v4
NazariiDenha Aug 26, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
refactor: compose DA types for more cohesion, maintainability and cod…
…e reuse (#925)

* refactor: compose DA types for more cohesion, maintainability and code reuse

* feat: simultaneous L1 message sync and L1 sync from DA instead of consecutive (#929)

* feat: add exponential backoff utility

* feat: add exponential backoff to pipeline

* feat: run pipeline and L1 message sync in parallel

* Update rollup/da_syncer/syncing_pipeline.go

init waitgroup

---------

Co-authored-by: Nazarii Denha <dengaaa2002@gmail.com>

* update da v2 to use correct blob decoding method

* introduce NewCommitBatchDAV1WithBlobDecodeFunc

---------

Co-authored-by: Nazarii Denha <dengaaa2002@gmail.com>
  • Loading branch information
jonastheis and NazariiDenha authored Jul 30, 2024
commit 1c36c2ce1afae3079e4fd03bb00a25da213543b4
50 changes: 50 additions & 0 deletions common/backoff/exponential.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package backoff

import (
"math"
"math/rand"
"time"
)

type Exponential struct {
attempt int

maxJitter time.Duration

min time.Duration
max time.Duration
}

func NewExponential(minimum, maximum, maxJitter time.Duration) *Exponential {
return &Exponential{
min: minimum,
max: maximum,
maxJitter: maxJitter,
}
}

func (e *Exponential) NextDuration() time.Duration {
var jitter time.Duration
if e.maxJitter > 0 {
jitter = time.Duration(rand.Int63n(e.maxJitter.Nanoseconds()))
}

minFloat := float64(e.min)
duration := math.Pow(2, float64(e.attempt)) * minFloat

// limit at configured maximum
if duration > float64(e.max) {
duration = float64(e.max)
}

e.attempt++
return time.Duration(duration) + jitter
}

func (e *Exponential) Reset() {
e.attempt = 0
}

func (e *Exponential) Attempt() int {
return e.attempt
}
39 changes: 39 additions & 0 deletions common/backoff/exponential_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package backoff

import (
"testing"
"time"

"github.com/stretchr/testify/require"
)

// TestExponentialBackoff verifies the doubling sequence and its cap at the
// configured maximum, the jitter bounds, and the min > max edge case.
func TestExponentialBackoff(t *testing.T) {
	t.Run("Multiple attempts", func(t *testing.T) {
		e := NewExponential(100*time.Millisecond, 10*time.Second, 0)
		for i, expected := range []time.Duration{
			100 * time.Millisecond,
			200 * time.Millisecond,
			400 * time.Millisecond,
			800 * time.Millisecond,
			1600 * time.Millisecond,
			3200 * time.Millisecond,
			6400 * time.Millisecond,
			10 * time.Second, // capped at max
		} {
			require.Equal(t, expected, e.NextDuration(), "attempt %d", i)
		}
	})

	t.Run("Jitter added", func(t *testing.T) {
		// With min == maxJitter == 1s the result must land in [1s, 2s).
		duration := NewExponential(1*time.Second, 10*time.Second, 1*time.Second).NextDuration()
		require.GreaterOrEqual(t, duration, 1*time.Second)
		require.Less(t, duration, 2*time.Second)
	})

	t.Run("Edge case: min > max", func(t *testing.T) {
		// The first duration is already capped at the maximum.
		require.Equal(t, 5*time.Second, NewExponential(10*time.Second, 5*time.Second, 0).NextDuration())
	})
}
20 changes: 12 additions & 8 deletions eth/backend.go
Original file line number Diff line number Diff line change
@@ -221,6 +221,18 @@ func New(stack *node.Node, config *ethconfig.Config, l1Client sync_service.EthCl
}
eth.txPool = core.NewTxPool(config.TxPool, chainConfig, eth.blockchain)

// Initialize and start DA syncing pipeline before SyncService as SyncService is blocking until all L1 messages are loaded.
// We need SyncService to load the L1 messages for DA syncing, but since both sync from last known L1 state, we can
// simply let them run simultaneously. If messages are missing in DA syncing, it will be handled by the syncing pipeline
// by waiting and retrying.
if config.EnableDASyncing {
eth.syncingPipeline, err = da_syncer.NewSyncingPipeline(context.Background(), eth.blockchain, chainConfig, eth.chainDb, l1Client, stack.Config().L1DeploymentBlock, config.DA)
if err != nil {
return nil, fmt.Errorf("cannot initialize da syncer: %w", err)
}
eth.syncingPipeline.Start()
}

// initialize and start L1 message sync service
eth.syncService, err = sync_service.NewSyncService(context.Background(), chainConfig, stack.Config(), eth.chainDb, l1Client)
if err != nil {
@@ -237,14 +249,6 @@ func New(stack *node.Node, config *ethconfig.Config, l1Client sync_service.EthCl
eth.rollupSyncService.Start()
}

if config.EnableDASyncing {
eth.syncingPipeline, err = da_syncer.NewSyncingPipeline(context.Background(), eth.blockchain, chainConfig, eth.chainDb, l1Client, stack.Config().L1DeploymentBlock, config.DA)
if err != nil {
return nil, fmt.Errorf("cannot initialize da syncer: %w", err)
}
eth.syncingPipeline.Start()
}

// Permit the downloader to use the trie cache allowance during fast sync
cacheLimit := cacheConfig.TrieCleanLimit + cacheConfig.TrieDirtyLimit + cacheConfig.SnapshotLimit
checkpoint := config.Checkpoint
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
@@ -50,7 +50,7 @@ require (
github.com/prometheus/tsdb v0.7.1
github.com/rjeczalik/notify v0.9.1
github.com/rs/cors v1.7.0
github.com/scroll-tech/da-codec v0.1.1-0.20240708084945-cb02d638c45f
github.com/scroll-tech/da-codec v0.1.1-0.20240729153040-31de3201bd1a
github.com/scroll-tech/zktrie v0.8.4
github.com/shirou/gopsutil v3.21.11+incompatible
github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
@@ -394,8 +394,8 @@ github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncj
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.1-0.20240708084945-cb02d638c45f h1:ZKPhn674+2AgBdIn2ZLGePsUZdM2823m2tJp+JlQf/Y=
github.com/scroll-tech/da-codec v0.1.1-0.20240708084945-cb02d638c45f/go.mod h1:O9jsbQGNnTEfyfZg7idevq6jGGSQshX70elX+TRH8vU=
github.com/scroll-tech/da-codec v0.1.1-0.20240729153040-31de3201bd1a h1:2+g6p5dxRQYFjDMBZgBJkem2HpIJdXEIlXzxreTCd4w=
github.com/scroll-tech/da-codec v0.1.1-0.20240729153040-31de3201bd1a/go.mod h1:O9jsbQGNnTEfyfZg7idevq6jGGSQshX70elX+TRH8vU=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
37 changes: 17 additions & 20 deletions rollup/da_syncer/batch_queue.go
Original file line number Diff line number Diff line change
@@ -7,27 +7,28 @@ import (

"github.com/scroll-tech/go-ethereum/core/rawdb"
"github.com/scroll-tech/go-ethereum/ethdb"
"github.com/scroll-tech/go-ethereum/rollup/da_syncer/da"
)

type BatchQueue struct {
// batches is map from batchIndex to batch blocks
batches map[uint64]DAEntry
batches map[uint64]da.Entry
DAQueue *DAQueue
db ethdb.Database
lastFinalizedBatchIndex uint64
}

func NewBatchQueue(DAQueue *DAQueue, db ethdb.Database) *BatchQueue {
return &BatchQueue{
batches: make(map[uint64]DAEntry),
batches: make(map[uint64]da.Entry),
DAQueue: DAQueue,
db: db,
lastFinalizedBatchIndex: 0,
}
}

// NextBatch finds next finalized batch and returns data, that was committed in that batch
func (bq *BatchQueue) NextBatch(ctx context.Context) (DAEntry, error) {
func (bq *BatchQueue) NextBatch(ctx context.Context) (da.Entry, error) {
if batch, ok := bq.getFinalizedBatch(); ok {
return batch, nil
}
@@ -36,18 +37,14 @@ func (bq *BatchQueue) NextBatch(ctx context.Context) (DAEntry, error) {
if err != nil {
return nil, err
}
switch daEntry := daEntry.(type) {
case *CommitBatchDAV0:
bq.batches[daEntry.BatchIndex] = daEntry
case *CommitBatchDAV1:
bq.batches[daEntry.BatchIndex] = daEntry
case *CommitBatchDAV2:
bq.batches[daEntry.BatchIndex] = daEntry
case *RevertBatchDA:
bq.deleteBatch(daEntry.BatchIndex)
case *FinalizeBatchDA:
if daEntry.BatchIndex > bq.lastFinalizedBatchIndex {
bq.lastFinalizedBatchIndex = daEntry.BatchIndex
switch daEntry.Type() {
case da.CommitBatchV0Type, da.CommitBatchV1Type, da.CommitBatchV2Type:
bq.batches[daEntry.BatchIndex()] = daEntry
case da.RevertBatchType:
bq.deleteBatch(daEntry.BatchIndex())
case da.FinalizeBatchType:
if daEntry.BatchIndex() > bq.lastFinalizedBatchIndex {
bq.lastFinalizedBatchIndex = daEntry.BatchIndex()
}
ret, ok := bq.getFinalizedBatch()
if ok {
@@ -62,7 +59,7 @@ func (bq *BatchQueue) NextBatch(ctx context.Context) (DAEntry, error) {
}

// getFinalizedBatch returns next finalized batch if there is available
func (bq *BatchQueue) getFinalizedBatch() (DAEntry, bool) {
func (bq *BatchQueue) getFinalizedBatch() (da.Entry, bool) {
if len(bq.batches) == 0 {
return nil, false
}
@@ -93,7 +90,7 @@ func (bq *BatchQueue) deleteBatch(batchIndex uint64) {
if !ok {
return
}
curBatchL1Height := batch.GetL1BlockNumber()
curBatchL1Height := batch.L1BlockNumber()
delete(bq.batches, batchIndex)
if len(bq.batches) == 0 {
rawdb.WriteDASyncedL1BlockNumber(bq.db, curBatchL1Height)
@@ -102,10 +99,10 @@ func (bq *BatchQueue) deleteBatch(batchIndex uint64) {
// we store here min height of currently loaded batches to be able to start syncing from the same place in case of restart
var minBatchL1Height uint64 = math.MaxUint64
for _, val := range bq.batches {
if val.GetL1BlockNumber() < minBatchL1Height {
minBatchL1Height = val.GetL1BlockNumber()
if val.L1BlockNumber() < minBatchL1Height {
minBatchL1Height = val.L1BlockNumber()
}
}
rawdb.WriteDASyncedL1BlockNumber(bq.db, minBatchL1Height-1)

rawdb.WriteDASyncedL1BlockNumber(bq.db, minBatchL1Height-1)
}
121 changes: 9 additions & 112 deletions rollup/da_syncer/block_queue.go
Original file line number Diff line number Diff line change
@@ -3,9 +3,9 @@ package da_syncer
import (
"context"
"fmt"
"math/big"

"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/rollup/da_syncer/da"
)

type BlockQueue struct {
@@ -37,120 +37,17 @@ func (bq *BlockQueue) getBlocksFromBatch(ctx context.Context) error {
if err != nil {
return err
}
switch daEntry := daEntry.(type) {
case *CommitBatchDAV0:
bq.blocks, err = bq.processDAV0ToBlocks(daEntry)
if err != nil {
return err
}
case *CommitBatchDAV1:
bq.blocks, err = bq.processDAV1ToBlocks(daEntry)
if err != nil {
return err
}
case *CommitBatchDAV2:
bq.blocks, err = bq.processDAV2ToBlocks(daEntry)
if err != nil {
return err
}
default:
return fmt.Errorf("unexpected type of daEntry: %T", daEntry)
}
return nil
}

func (bq *BlockQueue) processDAV0ToBlocks(daEntry *CommitBatchDAV0) ([]*types.Block, error) {
var blocks []*types.Block
l1TxPointer := 0
var curL1TxIndex uint64 = daEntry.ParentTotalL1MessagePopped
for _, chunk := range daEntry.Chunks {
for blockId, daBlock := range chunk.Blocks {
// create header
header := types.Header{
Number: big.NewInt(0).SetUint64(daBlock.BlockNumber),
Time: daBlock.Timestamp,
BaseFee: daBlock.BaseFee,
GasLimit: daBlock.GasLimit,
}
// create txs
// var txs types.Transactions
txs := make(types.Transactions, 0, daBlock.NumTransactions)
// insert l1 msgs
for l1TxPointer < len(daEntry.L1Txs) && daEntry.L1Txs[l1TxPointer].QueueIndex < curL1TxIndex+uint64(daBlock.NumL1Messages) {
l1Tx := types.NewTx(daEntry.L1Txs[l1TxPointer])
txs = append(txs, l1Tx)
l1TxPointer++
}
curL1TxIndex += uint64(daBlock.NumL1Messages)
// insert l2 txs
txs = append(txs, chunk.Transactions[blockId]...)
block := types.NewBlockWithHeader(&header).WithBody(txs, make([]*types.Header, 0))
blocks = append(blocks, block)
}
entryWithBlocks, ok := daEntry.(da.EntryWithBlocks)
// this should never happen because we only receive CommitBatch entries
if !ok {
return fmt.Errorf("unexpected type of daEntry: %T", daEntry)
}
return blocks, nil
}

func (bq *BlockQueue) processDAV1ToBlocks(daEntry *CommitBatchDAV1) ([]*types.Block, error) {
var blocks []*types.Block
l1TxPointer := 0
var curL1TxIndex uint64 = daEntry.ParentTotalL1MessagePopped
for _, chunk := range daEntry.Chunks {
for blockId, daBlock := range chunk.Blocks {
// create header
header := types.Header{
Number: big.NewInt(0).SetUint64(daBlock.BlockNumber),
Time: daBlock.Timestamp,
BaseFee: daBlock.BaseFee,
GasLimit: daBlock.GasLimit,
}
// create txs
// var txs types.Transactions
txs := make(types.Transactions, 0, daBlock.NumTransactions)
// insert l1 msgs
for l1TxPointer < len(daEntry.L1Txs) && daEntry.L1Txs[l1TxPointer].QueueIndex < curL1TxIndex+uint64(daBlock.NumL1Messages) {
l1Tx := types.NewTx(daEntry.L1Txs[l1TxPointer])
txs = append(txs, l1Tx)
l1TxPointer++
}
curL1TxIndex += uint64(daBlock.NumL1Messages)
// insert l2 txs
txs = append(txs, chunk.Transactions[blockId]...)
block := types.NewBlockWithHeader(&header).WithBody(txs, make([]*types.Header, 0))
blocks = append(blocks, block)
}
bq.blocks, err = entryWithBlocks.Blocks()
if err != nil {
return fmt.Errorf("failed to get blocks from daEntry: %w", err)
}
return blocks, nil
}

func (bq *BlockQueue) processDAV2ToBlocks(daEntry *CommitBatchDAV2) ([]*types.Block, error) {
var blocks []*types.Block
l1TxPointer := 0
var curL1TxIndex uint64 = daEntry.ParentTotalL1MessagePopped
for _, chunk := range daEntry.Chunks {
for blockId, daBlock := range chunk.Blocks {
// create header
header := types.Header{
Number: big.NewInt(0).SetUint64(daBlock.BlockNumber),
Time: daBlock.Timestamp,
BaseFee: daBlock.BaseFee,
GasLimit: daBlock.GasLimit,
}
// create txs
// var txs types.Transactions
txs := make(types.Transactions, 0, daBlock.NumTransactions)
// insert l1 msgs
for l1TxPointer < len(daEntry.L1Txs) && daEntry.L1Txs[l1TxPointer].QueueIndex < curL1TxIndex+uint64(daBlock.NumL1Messages) {
l1Tx := types.NewTx(daEntry.L1Txs[l1TxPointer])
txs = append(txs, l1Tx)
l1TxPointer++
}
curL1TxIndex += uint64(daBlock.NumL1Messages)
// insert l2 txs
txs = append(txs, chunk.Transactions[blockId]...)
block := types.NewBlockWithHeader(&header).WithBody(txs, make([]*types.Header, 0))
blocks = append(blocks, block)
}
}
return blocks, nil
return nil
}
Loading